[ 606.527857] env[62820]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62820) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.528206] env[62820]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62820) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.528321] env[62820]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62820) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.528663] env[62820]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 606.623883] env[62820]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62820) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 606.634308] env[62820]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62820) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 607.242440] env[62820]: INFO nova.virt.driver [None req-b1012680-cc11-4fe1-932d-97566cd57dae None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 607.312406] env[62820]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 607.312565] env[62820]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 607.312665] env[62820]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62820) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 610.664389] env[62820]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-43d8221f-7d3c-4538-af78-6dbd960d9080 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.681252] env[62820]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62820) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 610.681429] env[62820]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-d9c4f509-7e2a-463f-a8b2-f6842ef50132 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.706582] env[62820]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 9e34e.
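Editor's note: the entries above show oslo.vmware opening the vCenter session used by the VMwareVCDriver. Below is a minimal sketch of that call path, not taken from this log; the host is the one shown in the soap_url, the credentials are placeholders, and the keyword names are recalled from oslo_vmware.api.VMwareAPISession and may differ between releases.

```python
# Hypothetical sketch of the session setup logged above (placeholder credentials).
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.osci.c.eu-de-1.cloud.sap',   # vCenter host from the soap_url above
    'vcenter-user',                   # placeholder username
    'vcenter-password',               # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,
    port=443)

# The driver then reads the product version from the retrieved ServiceContent,
# which the next log entries report as "VMware vCenter version: 7.0.3".
print(session.vim.service_content.about.version)
```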
[ 610.706814] env[62820]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.394s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 610.707464] env[62820]: INFO nova.virt.vmwareapi.driver [None req-b1012680-cc11-4fe1-932d-97566cd57dae None None] VMware vCenter version: 7.0.3
[ 610.711041] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44b18a8-b1ca-45da-8717-fe412d0d8d82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.730699] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ba280d-6169-4b30-a25e-1017fd7ef1c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.736989] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ab3733-3671-4965-8472-ad15d4997137 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.743867] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9dd017-f2bc-449f-8017-7bd76ab5bdc6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.757517] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e201f3-137b-4b80-a856-dff3b2828596 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.763681] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcc0ac7-4324-4046-b2e4-ce338ca16c9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.794111] env[62820]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f55e34a0-5ab3-437f-a900-ae83f2b6bfec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.800105] env[62820]: DEBUG nova.virt.vmwareapi.driver [None req-b1012680-cc11-4fe1-932d-97566cd57dae None None] Extension org.openstack.compute already exists. {{(pid=62820) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 610.802862] env[62820]: INFO nova.compute.provider_config [None req-b1012680-cc11-4fe1-932d-97566cd57dae None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
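Editor's note: the paired "Acquiring lock" / "acquired ... waited" / "released ... held 3.394s" lines around _create_session are emitted by oslo.concurrency. A minimal sketch of the pattern follows; the function name is a placeholder, not nova's code.

```python
# Illustrative only: lockutils emits DEBUG lines of the same shape as the
# "oslo_vmware_api_lock" messages above (acquire, wait time, held time).
from oslo_concurrency import lockutils


@lockutils.synchronized('oslo_vmware_api_lock')
def create_session():
    """Placeholder standing in for VMwareAPISession._create_session."""
    ...


# The same serialization expressed as a context manager:
with lockutils.lock('oslo_vmware_api_lock'):
    pass
```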
[ 611.306612] env[62820]: DEBUG nova.context [None req-b1012680-cc11-4fe1-932d-97566cd57dae None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),d33e4061-780d-4733-9e06-e51a50fa3ab5(cell1) {{(pid=62820) load_cells /opt/stack/nova/nova/context.py:464}}
[ 611.308804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 611.309083] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 611.309776] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 611.310222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Acquiring lock "d33e4061-780d-4733-9e06-e51a50fa3ab5" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 611.310479] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Lock "d33e4061-780d-4733-9e06-e51a50fa3ab5" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 611.311523] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Lock "d33e4061-780d-4733-9e06-e51a50fa3ab5" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 611.333092] env[62820]: INFO dbcounter [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Registered counter for database nova_cell0
[ 611.341180] env[62820]: INFO dbcounter [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Registered counter for database nova_cell1
[ 611.344230] env[62820]: DEBUG oslo_db.sqlalchemy.engines [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62820) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 611.344851] env[62820]: DEBUG oslo_db.sqlalchemy.engines [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62820) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 611.349461] env[62820]: ERROR nova.db.main.api [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 611.349461] env[62820]:     result = function(*args, **kwargs)
[ 611.349461] env[62820]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 611.349461] env[62820]:     return func(*args, **kwargs)
[ 611.349461] env[62820]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 611.349461] env[62820]:     result = fn(*args, **kwargs)
[ 611.349461] env[62820]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 611.349461] env[62820]:     return f(*args, **kwargs)
[ 611.349461] env[62820]:   File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 611.349461] env[62820]:     return db.service_get_minimum_version(context, binaries)
[ 611.349461] env[62820]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 611.349461] env[62820]:     _check_db_access()
[ 611.349461] env[62820]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 611.349461] env[62820]:     stacktrace = ''.join(traceback.format_stack())
[ 611.349461] env[62820]:
[ 611.350273] env[62820]: ERROR nova.db.main.api [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 611.350273] env[62820]:     result = function(*args, **kwargs)
[ 611.350273] env[62820]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 611.350273] env[62820]:     return func(*args, **kwargs)
[ 611.350273] env[62820]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 611.350273] env[62820]:     result = fn(*args, **kwargs)
[ 611.350273] env[62820]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 611.350273] env[62820]:     return f(*args, **kwargs)
[ 611.350273] env[62820]:   File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 611.350273] env[62820]:     return db.service_get_minimum_version(context, binaries)
[ 611.350273] env[62820]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 611.350273] env[62820]:     _check_db_access()
[ 611.350273] env[62820]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 611.350273] env[62820]:     stacktrace = ''.join(traceback.format_stack())
[ 611.350273] env[62820]:
[ 611.350639] env[62820]: WARNING nova.objects.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 611.350799] env[62820]: WARNING nova.objects.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Failed to get minimum service version for cell d33e4061-780d-4733-9e06-e51a50fa3ab5
[ 611.351235] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Acquiring lock "singleton_lock" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 611.351396] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Acquired lock "singleton_lock" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
611.351626] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Releasing lock "singleton_lock" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.351941] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Full set of CONF: {{(pid=62820) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 611.352097] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ******************************************************************************** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 611.352227] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] Configuration options gathered from: {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 611.352367] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 611.352557] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 611.352684] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ================================================================================ {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 611.352951] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] allow_resize_to_same_host = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.353152] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] arq_binding_timeout = 300 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.353286] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] backdoor_port = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.353416] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] backdoor_socket = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.353583] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] block_device_allocate_retries = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.353745] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] block_device_allocate_retries_interval = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.353912] env[62820]: DEBUG 
oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cert = self.pem {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.354089] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.354260] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute_monitors = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.354427] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] config_dir = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.354597] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] config_drive_format = iso9660 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.354733] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.354893] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] config_source = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.355071] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] console_host = devstack {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.355240] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] control_exchange = nova {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.355400] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cpu_allocation_ratio = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.355562] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] daemon = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.355728] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] debug = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.355890] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] default_access_ip_network_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.356067] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] default_availability_zone = nova {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.356226] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] default_ephemeral_format = 
None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.356386] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] default_green_pool_size = 1000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.356624] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.356789] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] default_schedule_zone = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.356946] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] disk_allocation_ratio = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.357122] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] enable_new_services = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.357301] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] enabled_apis = ['osapi_compute'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.357463] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] enabled_ssl_apis = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.357638] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] flat_injected = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.357805] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] force_config_drive = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.357965] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] force_raw_images = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.358148] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] graceful_shutdown_timeout = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.358308] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] heal_instance_info_cache_interval = 60 {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.358522] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] host = cpu-1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.358714] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.358882] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.359054] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.359276] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.359437] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_build_timeout = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.359598] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_delete_interval = 300 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.359764] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_format = [instance: %(uuid)s] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.359924] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_name_template = instance-%08x {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.360094] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_usage_audit = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.360266] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_usage_audit_period = month {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.360430] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.360598] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.360765] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] internal_service_availability_zone = internal {{(pid=62820) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.360924] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] key = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.361097] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] live_migration_retry_count = 30 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.361268] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_color = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.361432] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_config_append = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.361601] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.361762] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_dir = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.361920] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.362059] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_options = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.362228] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_rotate_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.362396] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_rotate_interval_type = days {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.362561] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] log_rotation_type = none {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.362713] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.362856] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363039] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363210] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363341] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363510] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] long_rpc_timeout = 1800 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363673] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] max_concurrent_builds = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363837] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] max_concurrent_live_migrations = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.363996] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] max_concurrent_snapshots = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.364168] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] max_local_block_devices = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.364328] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] max_logfile_count = 30 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.364487] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] max_logfile_size_mb = 200 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.364649] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] maximum_instance_delete_attempts = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.364816] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metadata_listen = 0.0.0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.364986] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metadata_listen_port = 8775 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.365172] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metadata_workers = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.365336] env[62820]: DEBUG oslo_service.service 
[None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] migrate_max_retries = -1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.365502] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] mkisofs_cmd = genisoimage {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.365708] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.365842] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] my_ip = 10.180.1.21 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.366054] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.366219] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] network_allocate_retries = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.366393] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.366558] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.366718] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] osapi_compute_listen_port = 8774 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.366884] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] osapi_compute_unique_server_name_scope = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.367064] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] osapi_compute_workers = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.367231] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] password_length = 12 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.367393] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] periodic_enable = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.367550] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] periodic_fuzzy_delay = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.367749] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] pointer_model = usbtablet 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.367941] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] preallocate_images = none {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.368135] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] publish_errors = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.368268] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] pybasedir = /opt/stack/nova {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.368425] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ram_allocation_ratio = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.368600] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] rate_limit_burst = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.368791] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] rate_limit_except_level = CRITICAL {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.368954] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] rate_limit_interval = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.369131] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reboot_timeout = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.369295] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reclaim_instance_interval = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.369450] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] record = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.369618] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reimage_timeout_per_gb = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.369786] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] report_interval = 120 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.369950] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] rescue_timeout = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.370125] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reserved_host_cpus = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.370284] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reserved_host_disk_mb = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.370444] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reserved_host_memory_mb = 512 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.370602] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] reserved_huge_pages = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.370761] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] resize_confirm_window = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.370917] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] resize_fs_using_block_device = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.371084] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] resume_guests_state_on_host_boot = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.371256] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.371413] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] rpc_response_timeout = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.371571] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] run_external_periodic_tasks = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.371738] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] running_deleted_instance_action = reap {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.371897] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.372061] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] running_deleted_instance_timeout = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.372224] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler_instance_sync_interval = 120 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.372393] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_down_time = 720 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.372564] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
servicegroup_driver = db {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.372712] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] shell_completion = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.372875] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] shelved_offload_time = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.373048] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] shelved_poll_interval = 3600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.373221] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] shutdown_timeout = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.373381] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] source_is_ipv6 = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.373540] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ssl_only = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.373799] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.373970] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] sync_power_state_interval = 600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.374148] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] sync_power_state_pool_size = 1000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.374313] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] syslog_log_facility = LOG_USER {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.374469] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] tempdir = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.374629] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] timeout_nbd = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.374822] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] transport_url = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.374993] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] update_resources_interval = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.375168] env[62820]: DEBUG 
oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_cow_images = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.375329] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_eventlog = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.375485] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_journal = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.375644] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_json = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.375802] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_rootwrap_daemon = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.375958] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_stderr = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.376127] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] use_syslog = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.376284] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vcpu_pin_set = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.376450] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plugging_is_fatal = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.376617] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plugging_timeout = 300 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.376781] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] virt_mkfs = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.376943] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] volume_usage_poll_interval = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.377117] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] watch_log_file = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.377282] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] web = /usr/share/spice-html5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 611.377462] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.377661] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.377829] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.377997] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_concurrency.disable_process_locking = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.378545] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.378777] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.378956] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.379149] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.379325] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.379495] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.379681] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.auth_strategy = keystone {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.379854] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.compute_link_prefix = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.380074] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.380262] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.dhcp_domain = novalocal {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
611.380433] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.enable_instance_password = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.380603] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.glance_link_prefix = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.380775] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.380951] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.381129] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.instance_list_per_project_cells = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.381296] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.list_records_by_skipping_down_cells = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.381459] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.local_metadata_per_cell = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.381630] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.max_limit = 1000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.381797] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.metadata_cache_expiration = 15 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.381971] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.neutron_default_tenant_id = default {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.382159] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.response_validation = warn {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.382328] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.use_neutron_default_nets = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.382495] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.382658] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.382828] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.383013] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.383188] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_dynamic_targets = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.383349] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_jsonfile_path = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.383527] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.383721] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.backend = dogpile.cache.memcached {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.383891] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.backend_argument = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.384069] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.backend_expiration_time = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.384243] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.config_prefix = cache.oslo {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.384412] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.dead_timeout = 60.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.384576] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.debug_cache_backend = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.384737] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.enable_retry_client = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.384897] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.enable_socket_keepalive = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.385079] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.enabled = True {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.385245] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.enforce_fips_mode = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.385408] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.expiration_time = 600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.385570] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.hashclient_retry_attempts = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.385736] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.385894] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_dead_retry = 300 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.386072] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_password = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.386238] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.386400] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.386560] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_pool_maxsize = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.386739] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.386924] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_sasl_enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.387117] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.387287] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.387446] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.memcache_username = None {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.387639] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.proxies = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.387812] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_db = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.387973] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_password = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.388163] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.388340] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.388512] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_server = localhost:6379 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.388686] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_socket_timeout = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.388840] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.redis_username = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389010] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.retry_attempts = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389183] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.retry_delay = 0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389347] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.socket_keepalive_count = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389510] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.socket_keepalive_idle = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389670] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.socket_keepalive_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389829] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.tls_allowed_ciphers = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.389985] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.tls_cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.390156] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.tls_certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.390318] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.tls_enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.390474] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cache.tls_keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.390644] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.390819] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.auth_type = password {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.390980] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.391171] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.391335] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.391498] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.391661] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.cross_az_attach = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.391826] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.debug = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.391987] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.endpoint_template = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.392190] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.http_retries = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.392357] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.insecure = False {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.392519] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.392693] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.os_region_name = RegionOne {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.392860] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.393035] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cinder.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.393212] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.393375] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.cpu_dedicated_set = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.393533] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.cpu_shared_set = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.393699] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.image_type_exclude_list = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.393867] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.394058] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.394216] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.394379] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.394549] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.394713] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.resource_provider_association_refresh = 
300 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.394875] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.395045] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.shutdown_retry_interval = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.395228] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.395407] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] conductor.workers = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.395586] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] console.allowed_origins = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.395750] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] console.ssl_ciphers = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.395921] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] console.ssl_minimum_version = default {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.396106] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] consoleauth.enforce_session_timeout = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.396468] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] consoleauth.token_ttl = 600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.396468] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.396612] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.396778] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.396937] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.397112] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.connect_retry_delay = None {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.397275] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.397438] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.397602] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.397759] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.397920] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.398090] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.region_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.398254] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.398414] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.398599] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.service_type = accelerator {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.398796] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.398947] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.399119] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.399283] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.399466] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.399629] env[62820]: 
DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] cyborg.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.399810] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.backend = sqlalchemy {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.399983] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.connection = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.400164] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.connection_debug = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.400336] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.connection_parameters = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.400503] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.connection_recycle_time = 3600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.400666] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.connection_trace = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.400833] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.db_inc_retry_interval = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.400995] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.db_max_retries = 20 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.401174] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.db_max_retry_interval = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.401338] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.db_retry_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.401502] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.max_overflow = 50 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.401663] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.max_pool_size = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.401828] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.max_retries = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.401999] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
database.mysql_sql_mode = TRADITIONAL {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.402175] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.mysql_wsrep_sync_wait = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.402337] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.pool_timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.402501] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.retry_interval = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.402660] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.slave_connection = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.402823] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.sqlite_synchronous = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.402984] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] database.use_db_reconnect = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.403176] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.backend = sqlalchemy {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.403344] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.connection = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.403510] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.connection_debug = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.403679] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.connection_parameters = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.403844] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.connection_recycle_time = 3600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.404013] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.connection_trace = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.404186] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.db_inc_retry_interval = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.404352] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.db_max_retries = 20 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.404519] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.db_max_retry_interval = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.404684] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.db_retry_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.404849] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.max_overflow = 50 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.405026] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.max_pool_size = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.405196] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.max_retries = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.405367] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.405529] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.405689] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.pool_timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.405854] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.retry_interval = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.406024] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.slave_connection = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.406194] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] api_database.sqlite_synchronous = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.406370] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] devices.enabled_mdev_types = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.406548] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.406724] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ephemeral_storage_encryption.default_format = luks 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.406889] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ephemeral_storage_encryption.enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.407065] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.407238] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.api_servers = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.407402] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.407607] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.407768] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.407947] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.408123] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.408290] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.debug = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.408457] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.default_trusted_certificate_ids = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.408646] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.enable_certificate_validation = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.408831] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.enable_rbd_download = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.408996] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.409177] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.insecure = False {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.409340] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.409499] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.409655] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.409818] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.num_retries = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.410022] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.rbd_ceph_conf = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.410202] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.rbd_connect_timeout = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.410376] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.rbd_pool = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.410544] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.rbd_user = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.410706] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.region_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.410870] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.411039] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.411222] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.service_type = image {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.411385] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.411545] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.411705] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.411866] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.412060] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.412233] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.verify_glance_signatures = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.412394] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] glance.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.412561] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] guestfs.debug = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.412728] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.412891] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.auth_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.413083] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.413228] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.413390] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.413551] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.413710] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.413870] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.414074] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.insecure = False {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.414249] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.414410] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.414569] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.414731] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.region_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.414889] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.415059] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.415237] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.service_type = shared-file-system {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.415407] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.share_apply_policy_timeout = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.415574] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.415738] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.415896] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.416067] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.416251] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.416414] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] manila.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.416582] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] mks.enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.416938] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.417143] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] image_cache.manager_interval = 2400 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.417317] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] image_cache.precache_concurrency = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.417488] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] image_cache.remove_unused_base_images = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.417687] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.417862] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.418054] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] image_cache.subdirectory_name = _base {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.418239] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.api_max_retries = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.418408] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.api_retry_interval = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.418575] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.418742] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.auth_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.418902] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.419073] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.419241] env[62820]: DEBUG 
oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.419404] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.conductor_group = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.419567] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.419727] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.419894] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.420088] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.420252] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.420413] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.420572] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.420739] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.peer_list = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.420897] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.region_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.421067] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.421236] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.serial_console_state_timeout = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.421396] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.421572] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.service_type = baremetal 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.421733] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.shard = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.421898] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.422070] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.422236] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.422462] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.422664] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.422831] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ironic.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.423029] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.423214] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] key_manager.fixed_key = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.423401] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.423566] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.barbican_api_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.423728] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.barbican_endpoint = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.423904] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.barbican_endpoint_type = public {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.424074] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.barbican_region_name = None {{(pid=62820) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.424239] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.424401] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.424563] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.424725] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.424898] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.425198] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.number_of_retries = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.425402] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.retry_delay = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.425579] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.send_service_user_token = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.425748] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.425912] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.426090] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.verify_ssl = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.426257] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican.verify_ssl_path = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.426430] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.426592] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.auth_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.426753] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.426912] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.427089] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.427257] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.427416] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.427628] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.427773] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] barbican_service_user.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.427946] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.approle_role_id = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.428121] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.approle_secret_id = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.428297] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.kv_mountpoint = secret {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.428459] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.kv_path = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.428626] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.kv_version = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.428788] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.namespace = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.428949] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.root_token_id = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.429124] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.ssl_ca_crt_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.429296] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.timeout = 60.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.429458] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.use_ssl = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.429631] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.429803] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.429991] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.430183] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.430350] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.430511] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.430670] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.430833] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.430993] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.431165] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.431323] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.431482] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.region_name = None {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.431640] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.431801] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.431970] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.service_type = identity {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.432148] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.432313] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.432472] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.432631] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.432810] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.432969] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] keystone.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.433183] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.connection_uri = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.433350] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_mode = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.433516] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.433685] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_models = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.433855] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_power_governor_high = performance {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.434042] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.434213] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_power_management = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.434393] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.434554] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.device_detach_attempts = 8 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.434713] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.device_detach_timeout = 20 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.434879] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.disk_cachemodes = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.435054] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.disk_prefix = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.435225] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.enabled_perf_events = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.435395] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.file_backed_memory = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.435564] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.gid_maps = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.435725] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.hw_disk_discard = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.435888] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.hw_machine_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.436071] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_rbd_ceph_conf = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.436239] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.436403] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.436570] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_rbd_glance_store_name = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.436742] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_rbd_pool = rbd {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.436909] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_type = default {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.437078] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.images_volume_group = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.437244] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.inject_key = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.437405] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.inject_partition = -2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.437575] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.inject_password = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.437758] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.iscsi_iface = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.437924] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.iser_use_multipath = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.438099] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.438264] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.438425] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_downtime = 500 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.438597] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.438775] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.438937] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_inbound_addr = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.439112] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.439274] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.439434] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_scheme = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.439606] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_timeout_action = abort {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.439772] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_tunnelled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.439952] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_uri = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.440143] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.live_migration_with_native_tls = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.440307] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.max_queues = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.440471] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.440707] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.440874] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.nfs_mount_options = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.441200] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.441387] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.441557] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.441722] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.441891] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.442068] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.num_pcie_ports = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.442239] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.442405] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.pmem_namespaces = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.442566] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.quobyte_client_cfg = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.442868] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.443060] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.443230] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.443394] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.443554] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rbd_secret_uuid = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.443713] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 
None None] libvirt.rbd_user = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.443880] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.444063] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.444226] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rescue_image_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.444382] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rescue_kernel_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.444568] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rescue_ramdisk_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.444726] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.444903] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.rx_queue_size = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.445088] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.smbfs_mount_options = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.445389] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.445577] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.snapshot_compression = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.445739] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.snapshot_image_format = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.445969] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.446149] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.sparse_logical_volumes = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.446314] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.swtpm_enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.446483] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.swtpm_group = tss {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.446651] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.swtpm_user = tss {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.446823] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.sysinfo_serial = unique {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.446981] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.tb_cache_size = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.447153] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.tx_queue_size = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.447318] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.uid_maps = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.447481] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.use_virtio_for_bridges = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.447676] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.virt_type = kvm {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.447856] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.volume_clear = zero {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.448030] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.volume_clear_size = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.448202] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.volume_use_multipath = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.448362] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.vzstorage_cache_path = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.448534] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.448725] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
libvirt.vzstorage_mount_group = qemu {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.448897] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.449086] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.449376] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.449566] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.vzstorage_mount_user = stack {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.449738] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.449917] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.450107] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.auth_type = password {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.450275] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.450437] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.450602] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.450764] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.450923] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.451107] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.default_floating_pool = public {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.451269] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.endpoint_override = None 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.451431] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.extension_sync_interval = 600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.451592] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.http_retries = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.451759] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.451921] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.452094] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.452270] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.452429] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.452599] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.ovs_bridge = br-int {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.452769] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.physnets = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.452939] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.region_name = RegionOne {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.453111] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.453283] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.service_metadata_proxy = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.453445] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.453615] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.service_type = network {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} 
[ 611.453781] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.453943] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.454112] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.454271] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.454450] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.454616] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] neutron.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.454789] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] notifications.bdms_in_notifications = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.455038] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] notifications.default_level = INFO {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.455241] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] notifications.notification_format = unversioned {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.455413] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] notifications.notify_on_state_change = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.455607] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.455783] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] pci.alias = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.455954] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] pci.device_spec = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.456694] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] pci.report_in_placement = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.456694] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.456694] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.auth_type = password {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.456897] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.457106] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.457288] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.457467] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.457663] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.457850] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.458023] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.default_domain_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.458209] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.default_domain_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.458389] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.domain_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.458550] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.domain_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.458755] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.458943] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.459131] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.459318] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.459480] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.459669] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.password = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.459847] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.project_domain_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.460043] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.project_domain_name = Default {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.460217] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.project_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.460407] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.project_name = service {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.460594] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.region_name = RegionOne {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.460782] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.460950] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.461134] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.service_type = placement {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.461302] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.461463] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.461631] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
placement.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.461788] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.system_scope = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.461949] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.462127] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.trust_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.462289] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.user_domain_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.462461] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.user_domain_name = Default {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.462623] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.user_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.462801] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.username = nova {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.462982] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.463159] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] placement.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.463343] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.cores = 20 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.463510] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.count_usage_from_placement = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.463683] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.463853] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.injected_file_content_bytes = 10240 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.464029] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.injected_file_path_length = 255 {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.464202] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.injected_files = 5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.464373] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.instances = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.464541] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.key_pairs = 100 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.464711] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.metadata_items = 128 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.464884] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.ram = 51200 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.465058] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.recheck_quota = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.465230] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.server_group_members = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.465396] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.server_groups = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.465626] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.465817] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] quota.unified_limits_resource_strategy = require {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.465996] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.466175] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.466338] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.image_metadata_prefilter = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.466499] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.466664] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.max_attempts = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.466830] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.max_placement_results = 1000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.466993] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.467171] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.467335] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.467510] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] scheduler.workers = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.467727] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.467906] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.468101] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.468273] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.468439] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.468630] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.468806] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.469012] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.469194] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.host_subset_size = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.469361] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.469524] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.469688] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.469855] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.isolated_hosts = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.470030] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.isolated_images = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.470201] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.470366] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.470529] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.470691] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.pci_in_placement = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.470852] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.471026] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.471212] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.471378] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.471542] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.471705] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.471870] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.track_instance_changes = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.472059] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.472234] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metrics.required = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.472398] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metrics.weight_multiplier = 1.0 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.472839] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.472839] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] metrics.weight_setting = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.473033] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.473215] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] serial_console.enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.473395] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] serial_console.port_range = 10000:20000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.473571] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.473743] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.473912] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] serial_console.serialproxy_port = 6083 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.474091] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.474267] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.auth_type = password {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.474427] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.474584] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.474750] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.474909] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.insecure = False {{(pid=62820) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.475077] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.475252] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.send_service_user_token = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.475416] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.475582] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] service_user.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.475774] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.agent_enabled = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.475939] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.476269] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.476475] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.476650] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.html5proxy_port = 6082 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.476815] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.image_compression = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.476978] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.jpeg_compression = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.477152] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.playback_compression = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.477316] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.require_secure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.477487] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.server_listen = 127.0.0.1 {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.477685] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.478029] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.478216] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.streaming_mode = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.478382] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] spice.zlib_compression = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.478557] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] upgrade_levels.baseapi = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.478729] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] upgrade_levels.compute = auto {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.478894] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] upgrade_levels.conductor = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.479076] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] upgrade_levels.scheduler = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.479253] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.479420] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.479580] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.479745] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.479910] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.480082] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.insecure = False {{(pid=62820) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.480245] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.480409] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.480570] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vendordata_dynamic_auth.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.480748] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.api_retry_count = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.480908] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.ca_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.481099] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.481272] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.cluster_name = testcl1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.481439] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.connection_pool_size = 10 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.481599] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.console_delay_seconds = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.481770] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.datastore_regex = ^datastore.* {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.481979] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.482172] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.host_password = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.482342] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.host_port = 443 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.482513] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.host_username = administrator@vsphere.local {{(pid=62820) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.482683] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.insecure = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.482849] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.integration_bridge = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.483024] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.maximum_objects = 100 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.483190] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.pbm_default_policy = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.483353] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.pbm_enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.483511] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.pbm_wsdl_location = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.483681] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.483840] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.serial_port_proxy_uri = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.484000] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.serial_port_service_uri = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.484180] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.task_poll_interval = 0.5 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.484353] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.use_linked_clone = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.484524] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.vnc_keymap = en-us {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.484690] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.vnc_port = 5900 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.484855] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vmware.vnc_port_total = 10000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.485049] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.auth_schemes = ['none'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.485229] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.485519] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.485734] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.485913] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.novncproxy_port = 6080 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.486146] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.server_listen = 127.0.0.1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.486379] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.486572] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.vencrypt_ca_certs = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.486749] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.vencrypt_client_cert = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.486913] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vnc.vencrypt_client_key = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.487106] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.487276] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.disable_deep_image_inspection = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.487438] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.487636] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
611.487787] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.487950] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.disable_rootwrap = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.488129] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.enable_numa_live_migration = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.488291] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.488453] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.488613] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.488809] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.libvirt_disable_apic = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.488942] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.489118] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.489286] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.489447] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.489610] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.489774] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.489939] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.490108] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.490270] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.490434] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.490620] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.490795] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.client_socket_timeout = 900 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.490962] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.default_pool_size = 1000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.491143] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.keep_alive = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.491310] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.max_header_line = 16384 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.491473] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.491637] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.ssl_ca_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.491795] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.ssl_cert_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.491956] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.ssl_key_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.492133] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] wsgi.tcp_keepidle = 600 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.492313] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.492477] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] zvm.ca_file = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.492642] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] zvm.cloud_connector_url = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.492947] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.493136] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] zvm.reachable_timeout = 300 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.493313] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.493490] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.493670] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.connection_string = messaging:// {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.493840] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.enabled = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.494017] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.es_doc_type = notification {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.494189] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.es_scroll_size = 10000 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.494359] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.es_scroll_time = 2m {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.494521] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.filter_error_trace = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.494688] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.hmac_keys = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.494858] env[62820]: DEBUG 
oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.sentinel_service_name = mymaster {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.495069] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.socket_timeout = 0.1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.495199] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.trace_requests = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.495358] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler.trace_sqlalchemy = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.495543] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler_jaeger.process_tags = {} {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.495731] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler_jaeger.service_name_prefix = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.495905] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] profiler_otlp.service_name_prefix = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.496086] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] remote_debug.host = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.496247] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] remote_debug.port = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.496430] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.496597] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.496764] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.496928] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.497102] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.497268] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.497431] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.497612] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.497795] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.497972] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.498151] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.498328] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.498496] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.498672] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.498846] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.499033] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.499205] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.499383] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.499547] env[62820]: DEBUG oslo_service.service [None 
req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.499712] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.499883] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.500062] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.500233] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.500407] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.500570] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.500735] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.500907] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.501086] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.501257] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.501427] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.ssl = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.501602] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.501779] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.501940] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.502128] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.502304] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.502469] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.502661] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.502834] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_notifications.retry = -1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.503056] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.503252] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.503436] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.auth_section = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.503605] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.auth_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.503767] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.cafile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.503926] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.certfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.504106] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.collect_timing = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.504267] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] 
oslo_limit.connect_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.504451] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.connect_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.504585] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.endpoint_id = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.504760] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.504917] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.endpoint_override = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.505084] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.endpoint_region_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.505246] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.endpoint_service_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.505403] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.endpoint_service_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.505566] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.insecure = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.505750] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.keyfile = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.505912] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.max_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.506084] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.min_version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.506247] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.region_name = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.506407] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.retriable_status_codes = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.506565] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.service_name = None {{(pid=62820) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.506723] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.service_type = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.506885] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.split_loggers = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.507052] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.status_code_retries = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.507213] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.status_code_retry_delay = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.507372] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.timeout = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.507531] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.valid_interfaces = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.507729] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_limit.version = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.507899] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_reports.file_event_handler = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.508080] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.508297] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] oslo_reports.log_dir = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.508480] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.508647] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.508813] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.508979] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.509161] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.509325] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.509495] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.509655] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_ovs_privileged.group = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.509824] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.509986] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.510188] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.510364] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] vif_plug_ovs_privileged.user = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.510540] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.510748] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.510990] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.511195] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.511372] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.511543] 
env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.511712] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.511880] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.512082] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.512264] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.isolate_vif = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.512439] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.512607] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.512782] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.512956] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.513134] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] os_vif_ovs.per_port_bridge = False {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.513309] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] privsep_osbrick.capabilities = [21] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.513472] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] privsep_osbrick.group = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.513633] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] privsep_osbrick.helper_command = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.513804] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
611.513970] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.514145] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] privsep_osbrick.user = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.514319] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.514477] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] nova_sys_admin.group = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.514635] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] nova_sys_admin.helper_command = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.514801] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.514965] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.515139] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] nova_sys_admin.user = None {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 611.515295] env[62820]: DEBUG oslo_service.service [None req-e6e79a05-a29b-4384-84a1-b0d12ca9d606 None None] ******************************************************************************** {{(pid=62820) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 611.515727] env[62820]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 612.019259] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Getting list of instances from cluster (obj){ [ 612.019259] env[62820]: value = "domain-c8" [ 612.019259] env[62820]: _type = "ClusterComputeResource" [ 612.019259] env[62820]: } {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 612.020518] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6687490b-7759-4963-8289-d01c0b9f16a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.030117] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Got total of 0 instances {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 612.030706] env[62820]: WARNING nova.virt.vmwareapi.driver [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 612.031187] env[62820]: INFO nova.virt.node [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Generated node identity 8a0693d4-1456-4a04-ae15-b1eaea0edd7a [ 612.031412] env[62820]: INFO nova.virt.node [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Wrote node identity 8a0693d4-1456-4a04-ae15-b1eaea0edd7a to /opt/stack/data/n-cpu-1/compute_id [ 612.534643] env[62820]: WARNING nova.compute.manager [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Compute nodes ['8a0693d4-1456-4a04-ae15-b1eaea0edd7a'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 613.542957] env[62820]: INFO nova.compute.manager [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 614.548302] env[62820]: WARNING nova.compute.manager [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 614.548587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.548848] env[62820]: DEBUG oslo_concurrency.lockutils [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.549013] env[62820]: DEBUG oslo_concurrency.lockutils [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.549179] env[62820]: DEBUG nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 614.550097] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c77d49b-1c1e-4650-931f-939261d5681a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.558309] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504b20ac-9357-4640-851b-6e21757c194e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.571824] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a366b241-b3d0-484a-8250-99673844bd05 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.578117] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b082a4a6-d06e-4699-a3b2-beb8ada56b4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.607154] env[62820]: DEBUG nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181421MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 614.607289] env[62820]: DEBUG oslo_concurrency.lockutils [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.607484] env[62820]: DEBUG oslo_concurrency.lockutils [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.109651] env[62820]: WARNING nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] No compute node record for cpu-1:8a0693d4-1456-4a04-ae15-b1eaea0edd7a: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 8a0693d4-1456-4a04-ae15-b1eaea0edd7a could not be found. [ 615.613477] env[62820]: INFO nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a [ 617.121127] env[62820]: DEBUG nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 617.121518] env[62820]: DEBUG nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 617.280535] env[62820]: INFO nova.scheduler.client.report [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] [req-7971f567-07f6-4f9a-a2ac-95f6186cfadb] Created resource provider record via placement API for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
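Annotation: the entries above show the resource tracker creating the compute node record for cpu-1 and registering resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a via the placement API; the entries that follow push its VCPU/MEMORY_MB/DISK_GB inventory. A minimal sketch for cross-checking what placement stored is shown below; the placement endpoint and token are placeholders standing in for values normally taken from the keystone catalog, not anything present in this log.

    # Hedged sketch: read back the inventory of the resource provider that
    # nova-compute just registered. PLACEMENT_URL and TOKEN are assumptions.
    import requests

    PLACEMENT_URL = "http://controller/placement"          # assumption
    TOKEN = "gAAAA..."                                      # assumption: valid keystone token
    PROVIDER_UUID = "8a0693d4-1456-4a04-ae15-b1eaea0edd7a"  # from the log above

    resp = requests.get(
        f"{PLACEMENT_URL}/resource_providers/{PROVIDER_UUID}/inventories",
        headers={
            "X-Auth-Token": TOKEN,
            # placement is microversioned; 1.0 is enough for plain inventory reads
            "OpenStack-API-Version": "placement 1.0",
        },
    )
    resp.raise_for_status()
    for rc, inv in resp.json()["inventories"].items():
        # e.g. VCPU: total=48, allocation_ratio=4.0, reserved=0
        print(rc, inv["total"], inv["allocation_ratio"], inv["reserved"])

With the osc-placement CLI plugin installed, "openstack resource provider inventory list 8a0693d4-1456-4a04-ae15-b1eaea0edd7a" gives the same view from the command line.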
[ 617.297508] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3681e3b-20ac-4c6b-8a8e-b2a7e7e4654e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.305095] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f458a8a-f1b5-4bdb-ae31-a09718ae1021 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.333401] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87888cd5-78bb-43b1-9596-d039ea06d4e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.340118] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed28e12d-a6bc-48b2-9b07-1c88d82c9804 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.353233] env[62820]: DEBUG nova.compute.provider_tree [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.888121] env[62820]: DEBUG nova.scheduler.client.report [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 617.888359] env[62820]: DEBUG nova.compute.provider_tree [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 0 to 1 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 617.888500] env[62820]: DEBUG nova.compute.provider_tree [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.938692] env[62820]: DEBUG nova.compute.provider_tree [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Updating 
resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 1 to 2 during operation: update_traits {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 618.443399] env[62820]: DEBUG nova.compute.resource_tracker [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 618.443728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.836s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.443728] env[62820]: DEBUG nova.service [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Creating RPC server for service compute {{(pid=62820) start /opt/stack/nova/nova/service.py:186}} [ 618.457846] env[62820]: DEBUG nova.service [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] Join ServiceGroup membership for this service compute {{(pid=62820) start /opt/stack/nova/nova/service.py:203}} [ 618.458081] env[62820]: DEBUG nova.servicegroup.drivers.db [None req-54277776-5740-45fa-ae71-2b49ed572e08 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62820) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 669.460982] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.461417] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.461466] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 669.461598] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 669.965083] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 669.965391] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.965548] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.965822] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.965941] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.966146] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.966330] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 670.469449] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Getting list of instances from cluster (obj){ [ 670.469449] env[62820]: value = "domain-c8" [ 670.469449] env[62820]: _type = "ClusterComputeResource" [ 670.469449] env[62820]: } {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 670.470764] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f38916-c6b8-4117-9118-3a169a07c68a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.479858] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Got total of 0 instances {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 670.480130] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 670.480360] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
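Annotation: the block above is the first pass of the compute manager's periodic tasks (_check_instance_build_time, _heal_instance_info_cache, the _poll_* tasks, _sync_power_states, _reclaim_queued_deletes), all driven by oslo.service's periodic task machinery; _reclaim_queued_deletes bails out because reclaim_instance_interval is <= 0. A minimal, illustrative sketch of that machinery follows; it is not Nova's actual manager code, and the task bodies are placeholders.

    # Minimal sketch of oslo.service periodic tasks; illustrative only.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    # registered here only so the demo can read it; Nova registers the real option
    CONF.register_opts([cfg.IntOpt("reclaim_instance_interval", default=0)])


    class DemoManager(periodic_task.PeriodicTasks):
        """Illustrative stand-in; Nova's ComputeManager builds on the same base."""

        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # placeholder; the real task refreshes each instance's network info
            pass

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                # mirrors the "CONF.reclaim_instance_interval <= 0, skipping..." entries
                return


    mgr = DemoManager()
    # runs whichever tasks are due and returns the seconds until the next one
    idle_for = mgr.run_periodic_tasks(context=None)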
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 670.480534] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 670.984189] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.984470] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.984565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.984718] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 670.985578] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc61d0b-7a31-473e-95cb-6d7446a406f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.994244] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02af2a15-efcb-43ee-9fe2-9ef327202fe4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.008605] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e47865-883d-4554-b89b-baff08680547 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.015628] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bfeefd-4868-44b4-8a57-7c4bb96818d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.044187] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181436MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 671.044369] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.044515] 
env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.063618] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 672.063887] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 672.078164] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f2ac16-563f-4267-9086-639e5caa95e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.085672] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f84ae0-b075-47f9-94b5-a4b25af89628 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.114748] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1ea5c3-fe8c-458a-8d90-f8abcb1e1152 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.121531] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65192667-1d90-45b9-8f6c-a7cb4cd214f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.134167] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.637288] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 673.141617] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 673.141976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.142054] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.142385] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Getting list of instances from cluster (obj){ [ 673.142385] env[62820]: value = "domain-c8" [ 673.142385] env[62820]: _type = "ClusterComputeResource" [ 673.142385] env[62820]: } {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 673.143405] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4a52ad-4087-4e90-ae96-b0b633609b8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.151968] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Got total of 0 instances {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 730.366808] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.367240] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.872754] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.873075] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 730.873075] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 731.376302] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 731.376572] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.376701] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.376838] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.376983] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.377137] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.377278] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.377406] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 731.377544] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.880500] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.880812] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.881070] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.881248] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 731.882144] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78171bb-c3b2-4767-9e17-266540f79904 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.890124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7b6bc9-f151-4102-9b37-e3f1b2490d26 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.903642] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291b953d-d29d-47b6-8d97-10a2a574441d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.909492] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e2d5db-6d24-4ec1-9373-df065a91e7ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.937758] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181415MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 731.937946] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.938083] 
env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.957076] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 732.957076] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 732.969833] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca3b94b-fe91-4275-b03c-07124e56e04a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.977436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa6846d-b5d7-47bb-9251-ad55bfba7f0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.006859] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a241b1-075f-4009-8bcf-8ae1afd7209c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.014195] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5dbfb1-ae99-4327-877c-0e2b93505bbb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.027329] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.530266] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 733.531593] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 733.531773] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.533046] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.533400] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.533508] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 793.533572] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 794.036246] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 794.036497] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.036645] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.036789] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.036934] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.037085] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.037276] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.037418] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 794.037568] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.540647] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.540995] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.541063] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.541205] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 794.542104] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a916314a-2c6a-476d-a0f9-a9da0e8a84c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.550160] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be5aaeb-92ab-41c8-a05b-e2a4af793cb5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.564396] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031403ed-a142-4241-8f06-cc4387aa3c23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.570424] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bfabb5-62bd-4bff-afdc-4ae14010bb7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.598152] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181430MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 794.598287] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.598474] 
env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.617108] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 795.617428] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 795.629795] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81250229-fa8f-4877-a4b3-7e0cddd8a027 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.637672] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e5fe7f-1b28-4f13-a5b8-aa8b563cb622 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.666820] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e830c7a-c38c-4f12-900b-63e63eeb556c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.673434] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fbaff1-5f6b-45ce-ae76-1d858fd14da2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.685798] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.188920] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 796.190245] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 796.190444] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.333114] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.837380] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.837686] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 849.837686] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 850.340461] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 850.340985] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.340985] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.341175] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.341247] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.341372] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.341516] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 850.341657] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.845208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.845458] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.845628] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.845782] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 850.846705] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849fdd44-1b06-4699-af84-4ba1cdbd8d8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.855260] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf057729-368f-409c-b59c-2065f01db3a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.869082] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9eb3abf-606b-416b-9a24-c379440413ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.878637] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f19d0dc-6d52-41de-9656-27aea341fd37 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.907963] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181432MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 850.908208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.908329] 
env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.926781] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 851.927050] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 851.939730] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0cd918-c9ab-499f-bd3a-820317f37913 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.947358] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefe0175-05f0-4006-88e4-03f206f83e99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.976214] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68029b64-41e9-4321-b972-e11547c0e7c4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.982879] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63205fd6-1870-4aac-93b0-5aa720d32702 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.996088] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.499667] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 852.501057] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 852.501243] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.840847] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.841090] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.682316] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.682750] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11586}} [ 907.185902] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] There are 0 instances to clean {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11595}} [ 907.186174] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.186314] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances with incomplete migration {{(pid=62820) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11624}} [ 907.689015] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.190406] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.190782] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.190782] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.190933] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 909.681284] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.675756] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.680413] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.680606] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 910.680695] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 911.184305] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 911.184580] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.184756] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.688602] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.688968] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.689036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.689166] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 911.690034] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea88d7b9-5dcd-4e68-89e3-d6251e6f3da2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.698019] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e09494-ee5b-4a1d-9475-e50aac0ee3a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.711297] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067adc34-bd44-4f84-985c-d16c293e1457 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.717591] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b12063-694e-45c4-a61b-65fcf68bf2dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.748510] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181435MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 911.748510] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.748510] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.765526] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 912.765757] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 912.778655] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4161338-2f96-4d0a-a7a9-672c63b50843 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.786548] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da457983-01d0-47f3-9a0a-437dc49abf6a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.814819] env[62820]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d3eb5a-6935-4bc1-a261-3b3186c9b066 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.821330] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8956fc53-be1f-403b-b5fc-ac59259871d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.834418] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.337159] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 913.338449] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 913.338629] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.834771] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.676651] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.681557] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.681557] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.681557] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 969.681847] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.681857] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.681857] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.676415] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.680067] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.680067] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 972.680243] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 973.182782] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 973.183075] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.686503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.686799] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.686930] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.687096] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 973.688025] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d25823-a528-45a3-bec1-bc7d65782e3d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.696472] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7592c00-7b8d-4bc4-a1f6-55c9ded83eae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.709738] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d689723-d789-4646-8099-4297a08d6263 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.715630] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235bf741-cda5-4241-a47a-eba6a08a2496 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.744030] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181429MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 973.744186] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.744375] 
env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.780971] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 974.781219] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 974.799294] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 974.813274] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 974.813446] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.824812] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 974.839612] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 974.850026] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2e013aed-7e26-4a68-8f87-062e356b03b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.857458] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f65b901-d87c-48f6-8644-78344e1b79ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.885879] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8976e5b0-2d43-4734-ab4f-6a159127b318 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.892699] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37227141-8dea-4ca2-8d91-87be5f8bb308 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.905774] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.409161] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 975.410481] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 975.410665] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.666s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.908434] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.680197] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.680644] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1029.681646] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.681412] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.682359] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.681633] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.681825] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1032.681956] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1033.185061] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1033.185467] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.185508] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.689062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.689316] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.689761] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.689761] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1033.690521] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fae5b91-51ed-45ef-8cf5-9171f8ee2663 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.698704] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94be8dda-17bb-4bf7-97e8-aab19774c510 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.712124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a768c51b-c9ca-4371-a6a0-f9f2f52fa3c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.717841] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa0db75-16e1-4e3a-944e-04a1f3311b3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.746095] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181418MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1033.746255] env[62820]: DEBUG 
oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.746422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.763946] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1034.764269] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1034.776703] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e30233c-19f6-4479-8f16-73865a784cd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.784379] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d54678-ca05-4f26-bf3a-04eb675bb588 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.812681] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af294ed-b4ad-4a5f-989c-dbc2110c2e76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.819321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623f2e30-d8b6-4a98-85e5-00190ee93451 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.832156] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.335092] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1035.336368] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1035.336556] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.331467] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.331702] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.680706] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.681143] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.681143] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1090.677407] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.680798] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.680578] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.680420] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.681069] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.681069] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1094.681069] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1095.183855] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1095.184128] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.687663] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.689097] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.689097] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.689097] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1095.689097] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da893fdf-5842-4e60-b798-89765d5e1d0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.696988] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878ea6e6-092d-4c71-8fd3-24c563846b90 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.710445] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f767b70-3efb-4f2c-b2a8-0baef3a2a645 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.716420] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa38400-f745-4c24-9f01-1de12dce2a48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.745250] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181421MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1095.745378] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1095.745566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.764161] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1096.764419] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1096.777529] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1fd3f5-a015-4a4b-8e76-5ae3d053a11c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.785215] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b563c5a7-64c8-4042-b50e-d5acb72daaf8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.813981] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa36164e-4a87-4681-bf7b-49d822a67a13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.820994] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b08bad5-d30c-477b-a696-7d8d01081a43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.833750] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.336633] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1097.337840] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1097.338055] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.333671] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.334187] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.682031] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.680873] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.681127] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1153.680622] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.680999] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.680999] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.681698] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.681961] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1154.681996] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1155.184854] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1155.680583] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.184262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.184639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.184639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.184800] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1156.185730] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb944ff5-9cf6-45fb-8db3-f6aaa3bb0c2d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.193733] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5e52f9-c84f-4ba1-911a-a675f349dda9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.207433] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7086abc3-bce2-432c-9a31-7f593d89ec4b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.213878] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134e45ab-f090-4f0f-b469-c409c7cdaa4c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.241717] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181423MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1156.241848] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1156.242043] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.260392] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1157.260792] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1157.274704] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b60e92b-657c-4ddb-808e-1dc15d9eec9d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.281936] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a823155-3e23-4d40-91c1-23d9d60cc9b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.310229] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2ce0ef-4912-488a-998f-8fb12c1b8017 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.316602] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24f69bb-ee7f-4ff0-a6d5-8c15e012fa24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.328804] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.832245] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1157.833574] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1157.833765] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.830043] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.830043] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.680964] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.676747] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.681145] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.681538] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1213.681279] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.681639] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.183467] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.183878] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11586}} [ 1215.686515] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] There are 0 instances to clean {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11595}} [ 1215.686755] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.686871] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances with incomplete migration {{(pid=62820) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11624}} [ 1216.687030] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.687399] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1216.687399] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1217.190471] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1217.190697] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.190866] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.680596] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.184230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.184613] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.184678] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.184793] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1218.185705] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9274c23e-f73d-4172-a1f5-7cf9680c41cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.194100] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86558268-cf9e-48b4-bed5-3eb56c92fea9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.207561] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6dcaa9-7801-4fb9-a753-93b218b290bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.213363] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bb2760-6578-401d-8cb3-bb1301c5a85d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.241170] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181435MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1218.241339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.241523] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.260082] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1219.260308] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1219.274359] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ce873d-66d7-4d49-8c31-7c375d97ff12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.282021] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfede56-3439-488d-a6ff-59c13c62bdb8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.310360] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4eee1a3-fe22-49e9-b912-fc0c372d032c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.316837] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ed7854-c214-45b0-813e-87b9e65d7c7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.329223] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.832706] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1219.834023] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1219.834207] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.830064] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.830064] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1269.969037] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.476725] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Getting list of instances from cluster (obj){ [ 1270.476725] env[62820]: value = "domain-c8" [ 1270.476725] env[62820]: _type = "ClusterComputeResource" [ 1270.476725] env[62820]: } {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1270.478361] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514f02a3-a522-4627-96d1-3c88f4335f66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.487381] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Got total of 0 instances {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1272.199340] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.681645] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.681645] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1274.880561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.880849] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.114906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.115183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.384679] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1275.619201] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1275.681633] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.027982] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.028261] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.029892] env[62820]: INFO nova.compute.claims [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.146514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.683709] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.064822] env[62820]: DEBUG nova.scheduler.client.report [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1277.081861] env[62820]: DEBUG nova.scheduler.client.report [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1277.081861] env[62820]: DEBUG nova.compute.provider_tree [None req-61302760-53d7-4712-9b3f-ab55acff81bd 
tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1277.095810] env[62820]: DEBUG nova.scheduler.client.report [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1277.123332] env[62820]: DEBUG nova.scheduler.client.report [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1277.177283] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d638ca-2f6b-41b4-9329-a53838ed97b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.188011] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5284c591-b3fe-4c4a-86cf-535593c54436 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.222973] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89265ccb-e3ec-4f65-acfc-363b3e83c082 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.231152] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9840e71-c33c-45d4-9b7d-0e9822578613 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.246455] env[62820]: DEBUG nova.compute.provider_tree [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.680140] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.680515] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource 
{{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.752701] env[62820]: DEBUG nova.scheduler.client.report [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1278.186268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.257031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.257532] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1278.261967] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.116s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.263390] env[62820]: INFO nova.compute.claims [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1278.771126] env[62820]: DEBUG nova.compute.utils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.772834] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1278.773190] env[62820]: DEBUG nova.network.neutron [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1279.287021] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1279.368348] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62736ee1-70c7-4f69-b3c7-e8fa4f4561ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.378798] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a61dcd5-211c-42c2-89ad-a96177993490 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.417847] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c6b554-203c-4d6f-8db0-3e2804505a15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.425643] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692b25c7-4e80-4cc0-ad31-3a1f9935f353 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.439917] env[62820]: DEBUG nova.compute.provider_tree [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.863720] env[62820]: DEBUG nova.policy [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48284dfde04d4d7d8d1ceb9a3204121b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03b3c8eaed13452eb00e8d97383df642', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1279.949633] env[62820]: DEBUG nova.scheduler.client.report [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1280.303949] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1280.340165] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1280.340165] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1280.340165] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1280.340318] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1280.340500] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1280.341072] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1280.341632] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 
tempest-DeleteServersAdminTestJSON-412807917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1280.341891] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1280.342175] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1280.342587] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1280.342949] env[62820]: DEBUG nova.virt.hardware [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1280.345304] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c287f1-77ec-479c-890f-6566b0cb425b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.358278] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36c7119-a122-4074-becf-ecddcf596e99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.375134] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdb2eaf-4a58-46fe-829e-f8a54891a5f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.457221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.457778] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1280.461660] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.276s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.461844] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.462000] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1280.464794] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e06e8ed-777b-4f10-9e52-42d8dec0c9a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.472136] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3801519-9af9-4bdd-a437-17b571b032f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.491957] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b54fd0-2bde-4075-91d8-ee221623c471 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.499696] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1489eb-d239-41df-8e2c-501790b2e778 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.543430] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181416MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1280.543599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.543827] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.966727] env[62820]: DEBUG nova.compute.utils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1280.967089] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.967323] env[62820]: DEBUG nova.network.neutron [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1281.073156] env[62820]: DEBUG nova.network.neutron [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Successfully created port: ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.132201] env[62820]: DEBUG nova.policy [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2a98cf26a4949abadead50c7354a638', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04698d19505d400594ce250863e15456', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1281.476513] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1281.589205] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.590586] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.593224] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1281.593484] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1281.653590] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf5e91c-b4cd-41b6-8c99-2cd05eacaa24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.668241] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d547f74-b2bf-4dc6-9dc3-ffd19937de8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.712086] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fd27d9-ae82-4891-a460-2e789b72f105 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.727049] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80ab3ab-976b-4f60-af95-240d67922a4c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.742469] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.747031] env[62820]: DEBUG nova.network.neutron [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Successfully created port: 337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1282.246512] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1282.492209] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1282.545867] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1282.551348] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1282.551348] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1282.551348] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1282.551348] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1282.551348] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1282.551571] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1282.551571] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1282.551571] env[62820]: DEBUG nova.virt.hardware [None 
req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1282.551571] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1282.551571] env[62820]: DEBUG nova.virt.hardware [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1282.553075] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051378f9-82e4-48d6-8b66-9d8f34e51d02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.564907] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a72f31d-722b-4bc4-8c03-8fa6a9462032 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.753446] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1282.753446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.209s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.753196] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.754829] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.755133] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1283.755322] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1284.259864] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Skipping network cache update for instance because it is Building. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1284.260059] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1284.260199] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1284.260402] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.107770] env[62820]: DEBUG nova.network.neutron [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Successfully updated port: ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1285.491234] env[62820]: DEBUG nova.network.neutron [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Successfully updated port: 337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1285.614778] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "refresh_cache-3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.617716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired lock "refresh_cache-3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.617716] env[62820]: DEBUG nova.network.neutron [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1285.995085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.995085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.995085] env[62820]: DEBUG nova.network.neutron [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.181778] env[62820]: DEBUG nova.network.neutron [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1286.522869] env[62820]: DEBUG nova.network.neutron [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Updating instance_info_cache with network_info: [{"id": "ddb6608b-05dd-480c-9e52-01ba94622f69", "address": "fa:16:3e:18:21:4a", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb6608b-05", "ovs_interfaceid": "ddb6608b-05dd-480c-9e52-01ba94622f69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.584664] env[62820]: DEBUG nova.network.neutron [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1286.776928] env[62820]: DEBUG nova.network.neutron [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updating instance_info_cache with network_info: [{"id": "337d3329-4826-4d1a-a659-b6ce135f8b94", "address": "fa:16:3e:17:56:bb", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d3329-48", "ovs_interfaceid": "337d3329-4826-4d1a-a659-b6ce135f8b94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.029580] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Releasing lock "refresh_cache-3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.029664] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Instance network_info: |[{"id": "ddb6608b-05dd-480c-9e52-01ba94622f69", "address": "fa:16:3e:18:21:4a", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb6608b-05", "ovs_interfaceid": "ddb6608b-05dd-480c-9e52-01ba94622f69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1287.031625] env[62820]: DEBUG 
nova.virt.vmwareapi.vmops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:21:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddb6608b-05dd-480c-9e52-01ba94622f69', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1287.047117] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.047117] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4148941-a059-491d-9a34-00be440a2122 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.059449] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Created folder: OpenStack in parent group-v4. [ 1287.059644] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Creating folder: Project (03b3c8eaed13452eb00e8d97383df642). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.059890] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c06ad57-7023-4e8b-a313-88005f29ee53 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.068726] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Created folder: Project (03b3c8eaed13452eb00e8d97383df642) in parent group-v353379. [ 1287.068828] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Creating folder: Instances. Parent ref: group-v353380. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.069075] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52045179-d007-48cd-8c88-b6bd73e9ce62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.078993] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Created folder: Instances in parent group-v353380. 
[ 1287.079485] env[62820]: DEBUG oslo.service.loopingcall [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1287.079783] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1287.080093] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd57ad59-87f9-4e6b-bf2c-0f83fbbda611 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.104037] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1287.104037] env[62820]: value = "task-1694979" [ 1287.104037] env[62820]: _type = "Task" [ 1287.104037] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.112641] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1694979, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.280130] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.280130] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Instance network_info: |[{"id": "337d3329-4826-4d1a-a659-b6ce135f8b94", "address": "fa:16:3e:17:56:bb", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d3329-48", "ovs_interfaceid": "337d3329-4826-4d1a-a659-b6ce135f8b94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1287.280513] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:17:56:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '337d3329-4826-4d1a-a659-b6ce135f8b94', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1287.290357] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Creating folder: Project (04698d19505d400594ce250863e15456). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.291047] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04f13dec-81f5-45fc-8b09-ddb14c1b90f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.303535] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Created folder: Project (04698d19505d400594ce250863e15456) in parent group-v353379. [ 1287.304323] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Creating folder: Instances. Parent ref: group-v353383. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.305259] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c39df381-aadb-4d99-89d6-9bbcb295b17f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.316064] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Created folder: Instances in parent group-v353383. [ 1287.316425] env[62820]: DEBUG oslo.service.loopingcall [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1287.316516] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1287.316838] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e73d2f90-8b13-4b47-9852-a77a59dce7f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.338983] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1287.338983] env[62820]: value = "task-1694982" [ 1287.338983] env[62820]: _type = "Task" [ 1287.338983] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.351382] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1694982, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.618339] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1694979, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.851125] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1694982, 'name': CreateVM_Task, 'duration_secs': 0.375115} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.851840] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1287.859778] env[62820]: DEBUG oslo_vmware.service [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c8951e-9a89-4f3a-87f8-469f1d6aa616 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.867917] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.868108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.868788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1287.869052] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b5dc6cd-2837-4f8e-aeac-4084711598c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.875537] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1287.875537] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526617cf-fbcf-c416-ae8d-b297fcab5911" [ 1287.875537] env[62820]: _type = "Task" [ 1287.875537] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.888842] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526617cf-fbcf-c416-ae8d-b297fcab5911, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.116372] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1694979, 'name': CreateVM_Task, 'duration_secs': 0.565788} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.117071] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1288.117450] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.339738] env[62820]: DEBUG nova.compute.manager [req-be45e25c-390c-4866-9e0b-020e192e819f req-4e3229e1-3798-40c7-a751-43e61c8c6780 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Received event network-vif-plugged-ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1288.340016] env[62820]: DEBUG oslo_concurrency.lockutils [req-be45e25c-390c-4866-9e0b-020e192e819f req-4e3229e1-3798-40c7-a751-43e61c8c6780 service nova] Acquiring lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.340186] env[62820]: DEBUG oslo_concurrency.lockutils [req-be45e25c-390c-4866-9e0b-020e192e819f req-4e3229e1-3798-40c7-a751-43e61c8c6780 service nova] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.340352] env[62820]: DEBUG oslo_concurrency.lockutils [req-be45e25c-390c-4866-9e0b-020e192e819f req-4e3229e1-3798-40c7-a751-43e61c8c6780 service nova] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.340524] env[62820]: DEBUG nova.compute.manager [req-be45e25c-390c-4866-9e0b-020e192e819f req-4e3229e1-3798-40c7-a751-43e61c8c6780 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] No waiting events found dispatching network-vif-plugged-ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1288.342404] env[62820]: WARNING nova.compute.manager [req-be45e25c-390c-4866-9e0b-020e192e819f req-4e3229e1-3798-40c7-a751-43e61c8c6780 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Received unexpected event network-vif-plugged-ddb6608b-05dd-480c-9e52-01ba94622f69 for instance with vm_state building and task_state spawning. 
[ 1288.390014] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.390014] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1288.390159] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.390305] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.391246] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1288.391751] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.392278] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1288.392476] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c17e112-e696-4ae0-b514-d8530dc33bd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.398765] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d773c5f3-779d-4e12-9157-49456d7e037c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.408681] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 
tempest-MigrationsAdminTest-2107799577-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1288.408681] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1288.409786] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2594e04d-9082-46fc-a981-e565df393aaa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.419965] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11572057-5588-4a1d-a0f3-8f6e6758b9b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.422543] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1288.422543] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525285ea-c51d-e3de-56ec-43ded47a5f7c" [ 1288.422543] env[62820]: _type = "Task" [ 1288.422543] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.427629] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1288.427629] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52042862-4db8-6e43-12d9-17332edd7c80" [ 1288.427629] env[62820]: _type = "Task" [ 1288.427629] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.436521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.437282] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1288.437282] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.440697] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52042862-4db8-6e43-12d9-17332edd7c80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.633611] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.634057] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.942694] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Preparing fetch location {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1288.943029] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Creating directory with path [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1288.943821] env[62820]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9972d2b-1972-4521-99b2-579a48170097 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.965546] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Created directory with path [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1288.965829] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Fetch image to [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1288.967127] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Downloading image file data b17619ac-779a-4463-ab94-4bb0b9ba63c1 to [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk on the data store datastore1 {{(pid=62820) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1288.967127] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff000cf-1026-4f34-a14e-9849be836457 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.978394] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30993240-e735-4dc9-9f50-7b50cc31aaa1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.991536] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef5caf9-83f4-494c-97b9-545804e04e10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.027580] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8729c6f-847b-4685-9772-afed24a35464 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.033727] env[62820]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d0cc54c4-10a0-4424-8d00-26442c356a22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.068632] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Downloading image file data b17619ac-779a-4463-ab94-4bb0b9ba63c1 to the data store datastore1 {{(pid=62820) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1289.139225] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec 
tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1289.148513] env[62820]: DEBUG oslo_vmware.rw_handles [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62820) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1289.677941] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.678478] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.679869] env[62820]: INFO nova.compute.claims [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1290.160263] env[62820]: DEBUG oslo_vmware.rw_handles [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Completed reading data from the image iterator. {{(pid=62820) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1290.160505] env[62820]: DEBUG oslo_vmware.rw_handles [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1290.287164] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Downloaded image file data b17619ac-779a-4463-ab94-4bb0b9ba63c1 to vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk on the data store datastore1 {{(pid=62820) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1290.288155] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Caching image {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1290.288899] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Copying Virtual Disk [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk to [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1290.288899] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f167414-2d3a-4b81-8971-b07c9a53a445 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.300017] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1290.300017] env[62820]: value = "task-1694983" [ 1290.300017] env[62820]: _type = "Task" [ 1290.300017] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.308742] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694983, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.431102] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.431212] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.801087] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc46ded-1b7d-4e80-9fef-947e2a619ab7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.816075] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83edbac7-ee87-4bc3-ae9f-a5f8051fb6b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.819428] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694983, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.855237] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f414e5c-01b2-486c-bf15-898546e3843a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.863824] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4156bcf-5217-4d73-a2f9-60c4a284fc5b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.882320] env[62820]: DEBUG nova.compute.provider_tree [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.934710] env[62820]: DEBUG nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1291.106814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.106814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.312135] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694983, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682323} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.312470] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Copied Virtual Disk [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk to [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1291.312781] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleting the datastore file [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1/tmp-sparse.vmdk {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.313399] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abea0bb7-e0e6-4160-ad53-c9cf40a41da5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.323811] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1291.323811] env[62820]: value = "task-1694984" [ 1291.323811] env[62820]: _type = "Task" [ 1291.323811] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.333492] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694984, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.386076] env[62820]: DEBUG nova.scheduler.client.report [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1291.473788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.611758] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1291.633608] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.633608] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.779829] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.780059] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.836089] env[62820]: DEBUG 
oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024476} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.836352] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1291.836527] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Moving file from [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853/b17619ac-779a-4463-ab94-4bb0b9ba63c1 to [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1. {{(pid=62820) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1291.836772] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-0660684d-c6e1-41b1-9943-85c695ea87d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.844821] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1291.844821] env[62820]: value = "task-1694985" [ 1291.844821] env[62820]: _type = "Task" [ 1291.844821] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.854084] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694985, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.891868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.892157] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1291.900062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.422s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.900062] env[62820]: INFO nova.compute.claims [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1292.136516] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.137584] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1292.286989] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1292.355580] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694985, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024283} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.355678] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] File moved {{(pid=62820) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1292.355849] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Cleaning up location [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1292.356041] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleting the datastore file [datastore1] vmware_temp/6f159573-5b72-42fe-b305-a531373a9853 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1292.356300] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43197a1e-0ff6-43ee-ae39-4367a47229b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.363571] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1292.363571] env[62820]: value = "task-1694986" [ 1292.363571] env[62820]: _type = "Task" [ 1292.363571] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.374492] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694986, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.400514] env[62820]: DEBUG nova.compute.utils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1292.409634] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1292.409860] env[62820]: DEBUG nova.network.neutron [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1292.556722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "043e14a3-df5a-4098-b147-c6460bb85423" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.556722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "043e14a3-df5a-4098-b147-c6460bb85423" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.662699] env[62820]: DEBUG nova.policy [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf77578635f74b52970b2d7580c1bfd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b9015dc7894a1d98bf0bb73bdf7636', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1292.669745] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.785215] env[62820]: DEBUG nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Received event network-changed-ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1292.785427] env[62820]: DEBUG nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Refreshing instance network info cache due to event network-changed-ddb6608b-05dd-480c-9e52-01ba94622f69. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1292.786238] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Acquiring lock "refresh_cache-3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.786238] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Acquired lock "refresh_cache-3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.786238] env[62820]: DEBUG nova.network.neutron [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Refreshing network info cache for port ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1292.828128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.879096] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694986, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02526} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.879361] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.881802] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0030ad51-bc23-4de0-af4d-c7825715d8ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.892890] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1292.892890] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527194ef-91a1-822d-9664-0a1c6959b9db" [ 1292.892890] env[62820]: _type = "Task" [ 1292.892890] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.907722] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527194ef-91a1-822d-9664-0a1c6959b9db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.910310] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1292.920499] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "2f917745-28ef-4dfe-8c09-45c15a80145d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.921072] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.061658] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1293.269591] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e7ffe7-aa13-42d2-ac65-52e89763697c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.278982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcaac285-d760-48a1-af32-68d42af11281 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.323813] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4b7334-0671-493c-a439-fb499e8f7edd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.333458] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280a3e12-29ae-47b9-8bb2-6119ea4484ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.354129] env[62820]: DEBUG nova.compute.provider_tree [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.403978] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': 
session[5263da33-e147-45e9-71e6-fd449b37f057]527194ef-91a1-822d-9664-0a1c6959b9db, 'name': SearchDatastore_Task, 'duration_secs': 0.02223} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.404264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.404550] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4fa6e38f-dcca-4f65-86d6-1c585deb1c13/4fa6e38f-dcca-4f65-86d6-1c585deb1c13.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1293.404829] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.405013] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1293.405228] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e1cb946-7c65-404a-a0f7-c23b71f47e3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.407272] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c45e06c8-734d-470c-a9b1-666d00cfcbd8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.415009] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1293.415009] env[62820]: value = "task-1694987" [ 1293.415009] env[62820]: _type = "Task" [ 1293.415009] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.426702] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1293.426912] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1293.428052] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1293.431352] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eed0025-6f7d-4c7b-b4e5-ddcb4ed18fb3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.438223] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694987, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.443143] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1293.443143] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b3ce23-c958-4eb7-e615-ba3938af047d" [ 1293.443143] env[62820]: _type = "Task" [ 1293.443143] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.451492] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b3ce23-c958-4eb7-e615-ba3938af047d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.545844] env[62820]: DEBUG nova.network.neutron [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Successfully created port: 7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1293.592454] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.859255] env[62820]: DEBUG nova.scheduler.client.report [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1293.927279] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694987, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.929761] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1293.967033] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b3ce23-c958-4eb7-e615-ba3938af047d, 'name': SearchDatastore_Task, 'duration_secs': 0.016497} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.971298] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1293.971746] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1293.972150] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1293.972458] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1293.972515] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1293.972642] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1293.972872] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1293.973054] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1293.973234] env[62820]: DEBUG nova.virt.hardware [None 
req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1293.973998] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1293.973998] env[62820]: DEBUG nova.virt.hardware [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1293.975400] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449b5696-1009-4ecd-b067-b00833bd9bda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.979329] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4961634c-4de7-4de0-ad1d-0239052c33f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.984869] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.992467] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ee39dd-9a0f-4380-b6c7-c9da97ddb2db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.003077] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1294.003077] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a3c811-1ac2-4476-a78f-b0614c4790ed" [ 1294.003077] env[62820]: _type = "Task" [ 1294.003077] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.020444] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a3c811-1ac2-4476-a78f-b0614c4790ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009115} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.020766] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.021678] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111/3c5f66f1-c4e4-4ffd-8979-f7f828dc7111.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1294.021678] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4ba1f71-7b9f-4f1b-8f6c-02c1dfdb03c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.030634] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1294.030634] env[62820]: value = "task-1694990" [ 1294.030634] env[62820]: _type = "Task" [ 1294.030634] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.040625] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.088920] env[62820]: DEBUG nova.network.neutron [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Updated VIF entry in instance network info cache for port ddb6608b-05dd-480c-9e52-01ba94622f69. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1294.089167] env[62820]: DEBUG nova.network.neutron [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Updating instance_info_cache with network_info: [{"id": "ddb6608b-05dd-480c-9e52-01ba94622f69", "address": "fa:16:3e:18:21:4a", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb6608b-05", "ovs_interfaceid": "ddb6608b-05dd-480c-9e52-01ba94622f69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.366181] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.366960] env[62820]: DEBUG nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1294.372016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.234s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.377286] env[62820]: INFO nova.compute.claims [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1294.433989] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546837} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.434505] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4fa6e38f-dcca-4f65-86d6-1c585deb1c13/4fa6e38f-dcca-4f65-86d6-1c585deb1c13.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1294.434909] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1294.435354] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1626e264-c49b-4069-b5fa-0fe16137b037 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.444024] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1294.444024] env[62820]: value = "task-1694993" [ 1294.444024] env[62820]: _type = "Task" [ 1294.444024] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.455514] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694993, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.542225] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504254} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.542527] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111/3c5f66f1-c4e4-4ffd-8979-f7f828dc7111.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1294.542740] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1294.543144] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71ca64e9-54d6-4a82-ae36-c51e8667778e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.553213] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1294.553213] env[62820]: value = "task-1694994" [ 1294.553213] env[62820]: _type = "Task" [ 1294.553213] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.563947] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694994, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.593770] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Releasing lock "refresh_cache-3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.594094] env[62820]: DEBUG nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Received event network-vif-plugged-337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1294.594440] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Acquiring lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.594604] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.594715] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.594891] env[62820]: DEBUG nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] No waiting events found dispatching network-vif-plugged-337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1294.595094] env[62820]: WARNING nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Received unexpected event network-vif-plugged-337d3329-4826-4d1a-a659-b6ce135f8b94 for instance with vm_state building and task_state spawning. [ 1294.595244] env[62820]: DEBUG nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Received event network-changed-337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1294.595374] env[62820]: DEBUG nova.compute.manager [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Refreshing instance network info cache due to event network-changed-337d3329-4826-4d1a-a659-b6ce135f8b94. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1294.595673] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Acquiring lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.596043] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Acquired lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.596043] env[62820]: DEBUG nova.network.neutron [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Refreshing network info cache for port 337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1294.612363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.613746] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.881741] env[62820]: DEBUG nova.compute.utils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1294.885714] env[62820]: DEBUG nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Not allocating networking since 'none' was specified. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1294.923286] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "b7c52283-eada-47fd-887f-a5ad94a0583a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.923286] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.952592] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078292} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.954096] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1294.954320] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ee2e6a-23f7-43aa-870d-4b2eca2465c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.980351] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 4fa6e38f-dcca-4f65-86d6-1c585deb1c13/4fa6e38f-dcca-4f65-86d6-1c585deb1c13.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1294.980683] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-388c4b01-5111-430c-8b10-e7bccde86101 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.001807] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1295.001807] env[62820]: value = "task-1694995" [ 1295.001807] env[62820]: _type = "Task" [ 1295.001807] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.010199] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694995, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.069019] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694994, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06725} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.073684] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1295.074480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "9910a0ea-5ce0-41e9-b449-da729a4c3223" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.074678] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.075502] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef39dd16-8b2c-405c-8a20-d2a835885b3d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.101276] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111/3c5f66f1-c4e4-4ffd-8979-f7f828dc7111.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.101913] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44ef24f5-2401-4f4a-a88e-4a809dd400d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.123403] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1295.125875] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1295.125875] env[62820]: value = "task-1694996" [ 1295.125875] env[62820]: _type = "Task" [ 1295.125875] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.134090] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694996, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.386159] env[62820]: DEBUG nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1295.526517] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694995, 'name': ReconfigVM_Task, 'duration_secs': 0.319674} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.529823] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 4fa6e38f-dcca-4f65-86d6-1c585deb1c13/4fa6e38f-dcca-4f65-86d6-1c585deb1c13.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1295.531012] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00740c32-e17d-46f7-9509-1aafcafb5692 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.539977] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1295.539977] env[62820]: value = "task-1694997" [ 1295.539977] env[62820]: _type = "Task" [ 1295.539977] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.553460] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694997, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.644950] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694996, 'name': ReconfigVM_Task, 'duration_secs': 0.297278} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.644950] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111/3c5f66f1-c4e4-4ffd-8979-f7f828dc7111.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1295.644950] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d95dca24-ef64-4f12-b1eb-4e5157c30940 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.649981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.654719] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1295.654719] env[62820]: value = "task-1694998" [ 1295.654719] env[62820]: _type = "Task" [ 1295.654719] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.673129] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694998, 'name': Rename_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.674395] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d820a0-1fa8-45c1-afad-678c07adaff1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.685506] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35857894-4a53-4f1c-8d13-0983489612a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.718198] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b320a95b-0793-4af1-a809-6dac4f15099e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.729704] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ed508d-95cb-44db-96d0-58075561986a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.743497] env[62820]: DEBUG nova.compute.provider_tree [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1295.902205] env[62820]: DEBUG nova.network.neutron [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updated VIF entry in instance network info cache for port 337d3329-4826-4d1a-a659-b6ce135f8b94. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1295.902205] env[62820]: DEBUG nova.network.neutron [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updating instance_info_cache with network_info: [{"id": "337d3329-4826-4d1a-a659-b6ce135f8b94", "address": "fa:16:3e:17:56:bb", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d3329-48", "ovs_interfaceid": "337d3329-4826-4d1a-a659-b6ce135f8b94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.051809] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694997, 'name': Rename_Task, 'duration_secs': 0.143795} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.057272] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1296.057272] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdd190da-9768-4a52-83b3-2ca83e639f09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.062740] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1296.062740] env[62820]: value = "task-1694999" [ 1296.062740] env[62820]: _type = "Task" [ 1296.062740] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.072119] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694999, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.173365] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1694998, 'name': Rename_Task, 'duration_secs': 0.15216} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.173365] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1296.173365] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-005f4f38-d371-4f43-a781-26443d9c1ef5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.179325] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1296.179325] env[62820]: value = "task-1695000" [ 1296.179325] env[62820]: _type = "Task" [ 1296.179325] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.191446] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695000, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.247839] env[62820]: DEBUG nova.scheduler.client.report [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1296.349366] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.349695] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.404500] env[62820]: DEBUG nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1296.412703] env[62820]: DEBUG oslo_concurrency.lockutils [req-35845d5b-b593-471c-870e-83e6e322f911 req-62df1940-e040-49c9-b478-32c8fe311b33 service nova] Releasing lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.454521] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1296.455348] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1296.455348] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1296.455348] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1296.455348] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1296.455656] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1296.455656] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1296.460368] env[62820]: DEBUG nova.virt.hardware [None 
req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1296.460596] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1296.461182] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1296.461182] env[62820]: DEBUG nova.virt.hardware [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1296.464864] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee1924c-d409-42b5-8596-8f4ee6b80fbf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.479283] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0139a88-3c70-46d4-96bd-ccf3e3f21c2e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.501869] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1296.508691] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Creating folder: Project (316fc4ae8b7745279a240c79189817f8). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1296.509205] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bf7a3f4-a868-4b40-9026-c3ef2080a43c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.524064] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Created folder: Project (316fc4ae8b7745279a240c79189817f8) in parent group-v353379. [ 1296.524064] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Creating folder: Instances. Parent ref: group-v353389. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1296.524064] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3944214-952a-40f1-a771-5204ef143f02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.534575] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Created folder: Instances in parent group-v353389. [ 1296.535214] env[62820]: DEBUG oslo.service.loopingcall [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1296.535691] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1296.536023] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-724d6bb4-f92d-4b7b-a90f-3ee163ae793a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.561232] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1296.561232] env[62820]: value = "task-1695003" [ 1296.561232] env[62820]: _type = "Task" [ 1296.561232] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.578549] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695003, 'name': CreateVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.578980] env[62820]: DEBUG oslo_vmware.api [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1694999, 'name': PowerOnVM_Task, 'duration_secs': 0.48178} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.579357] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1296.580068] env[62820]: INFO nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Took 14.09 seconds to spawn the instance on the hypervisor. 
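The spawn sequence logged above repeats one pattern over and over: a vSphere method is invoked (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task), the API hands back a task reference, and the driver waits on it, logging "Waiting for the task ... to complete", intermediate "progress is N%." polls, and finally "completed successfully." before the next step runs. The sketch below is a minimal, self-contained approximation of that poll loop in plain Python; the `TaskInfo` shape, the `get_task_info` callable, and the poll interval are illustrative assumptions only and are not the oslo.vmware implementation that produced these lines.

```python
import time
from dataclasses import dataclass


# Hypothetical, simplified stand-in for the vSphere TaskInfo object that the
# driver polls; the real object comes back from the vCenter SOAP API.
@dataclass
class TaskInfo:
    state: str            # "queued" | "running" | "success" | "error"
    progress: int = 0     # percent complete, as in "progress is 6%."
    error: str | None = None


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the log's wait/poll cycle.

    `get_task_info` is an assumed callable returning the current TaskInfo;
    in the real driver this is a property read against vCenter.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info                       # "... completed successfully."
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        # The "Task: {...} progress is N%." lines correspond to this branch.
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")


# Usage with a fake task that finishes on the third poll.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 6),
                   TaskInfo("running", 50),
                   TaskInfo("success", 100)])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))
```

The blocking wait is what serializes each build step in the log: the disk extend, reconfigure, rename, and power-on for a given instance always complete in order, while tasks for different instances (task-1694995 vs. task-1694996, for example) interleave freely because each build runs under its own per-instance lock.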
[ 1296.580467] env[62820]: DEBUG nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1296.581849] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25efe25-86b7-466b-aea2-ac2accd32ad5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.699272] env[62820]: DEBUG oslo_vmware.api [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695000, 'name': PowerOnVM_Task, 'duration_secs': 0.496596} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.699272] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1296.699588] env[62820]: INFO nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Took 16.40 seconds to spawn the instance on the hypervisor. [ 1296.700283] env[62820]: DEBUG nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1296.701729] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba33fc7b-8552-473f-855d-0f1b3e8ccee3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.757300] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.757878] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1296.763307] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.094s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.765150] env[62820]: INFO nova.compute.claims [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.075790] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695003, 'name': CreateVM_Task, 'duration_secs': 0.341827} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.075790] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1297.075790] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.075790] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.075790] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1297.079585] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f01fb516-4325-4551-8201-13823d9b6e28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.081144] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1297.081144] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5285a0a0-3d6c-61ef-d753-802f9cfb6bb5" [ 1297.081144] env[62820]: _type = "Task" [ 1297.081144] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.091538] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5285a0a0-3d6c-61ef-d753-802f9cfb6bb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.105129] env[62820]: INFO nova.compute.manager [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Took 20.98 seconds to build instance. [ 1297.194149] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.194487] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.233667] env[62820]: INFO nova.compute.manager [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Took 21.34 seconds to build instance. [ 1297.271855] env[62820]: DEBUG nova.compute.utils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1297.276036] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1297.276036] env[62820]: DEBUG nova.network.neutron [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1297.534841] env[62820]: DEBUG nova.policy [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf77578635f74b52970b2d7580c1bfd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b9015dc7894a1d98bf0bb73bdf7636', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1297.594332] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5285a0a0-3d6c-61ef-d753-802f9cfb6bb5, 'name': SearchDatastore_Task, 'duration_secs': 0.008474} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.594499] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.595395] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1297.595395] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.595395] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.595395] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.595611] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52ed7cf8-857b-4594-ac2b-068e20418cee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.605527] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.605723] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1297.606555] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e3bdf8c-d978-4e61-9739-f92d22d514e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.609905] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09bb429a-5f11-4062-86dd-e101cadff550 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.494s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.614562] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1297.614562] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5215978e-d6d8-64df-c1a9-70a0f1c0f461" [ 1297.614562] env[62820]: _type = "Task" [ 1297.614562] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.624712] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5215978e-d6d8-64df-c1a9-70a0f1c0f461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.736498] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61302760-53d7-4712-9b3f-ab55acff81bd tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.855s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.779172] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1298.112854] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1298.120510] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af08d83d-0eb2-499f-b06d-900e8a209fd7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.135228] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5215978e-d6d8-64df-c1a9-70a0f1c0f461, 'name': SearchDatastore_Task, 'duration_secs': 0.010403} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.137237] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5a58d77-2850-4e19-9ba1-cbd54c4ab24b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.142186] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2f9dfb-c942-41d9-84f7-d0cee1a649be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.152112] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1298.152112] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c20860-c5df-b52b-64c4-9a32118ba98a" [ 1298.152112] env[62820]: _type = "Task" [ 1298.152112] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.189206] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12dacb8-cfd4-4384-ba72-bcb5402c8a85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.204684] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c20860-c5df-b52b-64c4-9a32118ba98a, 'name': SearchDatastore_Task, 'duration_secs': 0.008993} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.207225] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f560d6-3e0b-45a7-9b7f-bf75eee29212 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.211947] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.211947] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1298.211947] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e863a79-57d8-4dd6-b518-079c19c0174f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.225642] env[62820]: DEBUG nova.compute.provider_tree [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.230921] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1298.230921] env[62820]: value = "task-1695005" [ 1298.230921] env[62820]: _type = "Task" [ 1298.230921] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.239462] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695005, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.243931] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1298.361827] env[62820]: DEBUG nova.network.neutron [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Successfully updated port: 7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1298.372278] env[62820]: DEBUG nova.network.neutron [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Successfully created port: 4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1298.672642] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.732610] env[62820]: DEBUG nova.scheduler.client.report [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1298.754991] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695005, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.779362] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.796735] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1298.826532] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1298.826865] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1298.827326] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1298.827326] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1298.827563] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1298.827647] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1298.827795] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1298.827949] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1298.828124] env[62820]: DEBUG nova.virt.hardware [None 
req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1298.828586] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1298.828642] env[62820]: DEBUG nova.virt.hardware [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1298.829596] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0de336-992d-41b6-b2d8-a761d5643c38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.839914] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4131314-9d08-4b6e-9468-c7b479f7e7de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.866422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1298.866579] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.866684] env[62820]: DEBUG nova.network.neutron [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1299.217682] env[62820]: DEBUG nova.compute.manager [req-98d7296c-7945-4dae-806d-0148c3b0688c req-8836bafb-e3e3-442f-bee2-458ea9da0f1c service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Received event network-vif-plugged-7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1299.219167] env[62820]: DEBUG oslo_concurrency.lockutils [req-98d7296c-7945-4dae-806d-0148c3b0688c req-8836bafb-e3e3-442f-bee2-458ea9da0f1c service nova] Acquiring lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.219167] env[62820]: DEBUG oslo_concurrency.lockutils [req-98d7296c-7945-4dae-806d-0148c3b0688c req-8836bafb-e3e3-442f-bee2-458ea9da0f1c service nova] Lock 
"069f58d6-f6bc-4ded-8274-6fed7c2f45b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.219167] env[62820]: DEBUG oslo_concurrency.lockutils [req-98d7296c-7945-4dae-806d-0148c3b0688c req-8836bafb-e3e3-442f-bee2-458ea9da0f1c service nova] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.219167] env[62820]: DEBUG nova.compute.manager [req-98d7296c-7945-4dae-806d-0148c3b0688c req-8836bafb-e3e3-442f-bee2-458ea9da0f1c service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] No waiting events found dispatching network-vif-plugged-7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1299.219167] env[62820]: WARNING nova.compute.manager [req-98d7296c-7945-4dae-806d-0148c3b0688c req-8836bafb-e3e3-442f-bee2-458ea9da0f1c service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Received unexpected event network-vif-plugged-7f1b810c-dc19-4971-a532-bdac241941cf for instance with vm_state building and task_state spawning. [ 1299.254350] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.489s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.254350] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1299.257548] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695005, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533807} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.257900] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.430s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.259497] env[62820]: INFO nova.compute.claims [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1299.261877] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1299.262324] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1299.262560] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d389c9c-73c2-4526-9a7a-38f6f627fe17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.270832] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1299.270832] env[62820]: value = "task-1695007" [ 1299.270832] env[62820]: _type = "Task" [ 1299.270832] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.281147] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695007, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.407568] env[62820]: DEBUG nova.network.neutron [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1299.681785] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Acquiring lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.682057] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.682268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Acquiring lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.682476] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.682675] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.687509] env[62820]: INFO nova.compute.manager [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Terminating instance [ 1299.707886] env[62820]: DEBUG nova.network.neutron [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Updating instance_info_cache with network_info: [{"id": "7f1b810c-dc19-4971-a532-bdac241941cf", "address": "fa:16:3e:05:4b:bd", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f1b810c-dc", "ovs_interfaceid": "7f1b810c-dc19-4971-a532-bdac241941cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.761849] env[62820]: DEBUG nova.compute.utils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1299.764329] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1299.764826] env[62820]: DEBUG nova.network.neutron [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1299.782762] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072796} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.787154] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1299.787154] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c5f740-a64d-41ae-a2c8-2aa306c5aff9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.813095] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.814385] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1eaf99c3-758d-4de1-abaf-409d176fe21e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.838362] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1299.838362] env[62820]: value = "task-1695008" [ 1299.838362] env[62820]: _type = "Task" [ 1299.838362] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.846392] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695008, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.093336] env[62820]: DEBUG nova.policy [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2c3d2f9252478688e02f0b210b4725', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7518fa7b0f743ccaa0a14aee92b88fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1300.197484] env[62820]: DEBUG nova.compute.manager [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1300.200080] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1300.200080] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db1e054-78f1-43e3-94c9-35d7e714df44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.212444] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.213953] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance network_info: |[{"id": "7f1b810c-dc19-4971-a532-bdac241941cf", "address": "fa:16:3e:05:4b:bd", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f1b810c-dc", "ovs_interfaceid": "7f1b810c-dc19-4971-a532-bdac241941cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1300.213953] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1300.214205] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:4b:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f1b810c-dc19-4971-a532-bdac241941cf', 'vif_model': 
'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.226174] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating folder: Project (43b9015dc7894a1d98bf0bb73bdf7636). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1300.226174] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbd63e33-548e-4b4f-8e67-6387df5062db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.229110] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fc73006-8ac5-4f60-91dd-edfb103f63b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.234646] env[62820]: DEBUG oslo_vmware.api [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Waiting for the task: (returnval){ [ 1300.234646] env[62820]: value = "task-1695009" [ 1300.234646] env[62820]: _type = "Task" [ 1300.234646] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.239862] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created folder: Project (43b9015dc7894a1d98bf0bb73bdf7636) in parent group-v353379. [ 1300.240079] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating folder: Instances. Parent ref: group-v353392. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1300.243201] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07e135d-0614-4431-b09e-cfcf1eba3c72 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.248312] env[62820]: DEBUG oslo_vmware.api [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Task: {'id': task-1695009, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.258957] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created folder: Instances in parent group-v353392. [ 1300.259348] env[62820]: DEBUG oslo.service.loopingcall [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.259670] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1300.259983] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acc49506-ebb2-454e-9f98-f61dce18ac88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.278061] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1300.289691] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.289691] env[62820]: value = "task-1695012" [ 1300.289691] env[62820]: _type = "Task" [ 1300.289691] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.300766] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695012, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.347557] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695008, 'name': ReconfigVM_Task, 'duration_secs': 0.246194} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.349998] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Reconfigured VM instance instance-00000004 to attach disk [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1300.349998] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2037726-2574-475d-8df3-eafdca74d71a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.357639] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1300.357639] env[62820]: value = "task-1695013" [ 1300.357639] env[62820]: _type = "Task" [ 1300.357639] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.370284] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695013, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.619698] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031ab6ee-6e4a-42ee-a290-432d4f5e6b4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.633189] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25174dc-7771-4c57-9c2a-17c84557d592 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.682065] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6b5eb7-8d55-4f0d-82df-6708aed3e9d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.692536] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f986dbb-6efd-47d8-ae0c-bcbd61e5096e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.715082] env[62820]: DEBUG nova.compute.provider_tree [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1300.749662] env[62820]: DEBUG oslo_vmware.api [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Task: {'id': task-1695009, 'name': PowerOffVM_Task, 'duration_secs': 0.230958} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.749928] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1300.750110] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1300.750382] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2654df8-cc14-4bf5-9bcd-f68f18ebb31c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.804809] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695012, 'name': CreateVM_Task, 'duration_secs': 0.366376} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.804990] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1300.807788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.807788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.807788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1300.807788] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72dce2e7-c8f8-4cd6-947e-1b5575043665 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.812855] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1300.812855] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b83302-072f-0450-0ca3-4ee328f0591d" [ 1300.812855] env[62820]: _type = "Task" [ 1300.812855] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.827574] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1300.827783] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1300.827953] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Deleting the datastore file [datastore1] 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1300.828246] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b83302-072f-0450-0ca3-4ee328f0591d, 'name': SearchDatastore_Task, 'duration_secs': 0.009062} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.828435] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f77405f-92c4-4dfa-af71-b332601c8d9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.830405] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.830627] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1300.830842] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.830980] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.831160] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1300.831421] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3947d5c2-b5ee-4730-8451-edd713128595 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.840795] env[62820]: DEBUG oslo_vmware.api [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Waiting for the task: (returnval){ [ 1300.840795] env[62820]: value = "task-1695015" [ 1300.840795] env[62820]: _type = "Task" [ 1300.840795] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.845212] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1300.845406] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1300.846477] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bbeeba2-7cb8-449f-b796-48b2cdf99588 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.852401] env[62820]: DEBUG oslo_vmware.api [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Task: {'id': task-1695015, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.855351] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1300.855351] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52df8f8f-c4f4-ae1d-d817-97cc5f15de75" [ 1300.855351] env[62820]: _type = "Task" [ 1300.855351] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.868572] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52df8f8f-c4f4-ae1d-d817-97cc5f15de75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.871648] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695013, 'name': Rename_Task, 'duration_secs': 0.155379} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.871863] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.872089] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ba35648-eee2-458c-9268-4b3850e3eab5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.878159] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1300.878159] env[62820]: value = "task-1695016" [ 1300.878159] env[62820]: _type = "Task" [ 1300.878159] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.886329] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695016, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.896509] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.896740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.031971] env[62820]: DEBUG nova.network.neutron [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Successfully created port: 262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.097246] env[62820]: DEBUG nova.network.neutron [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Successfully updated port: 4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1301.217663] env[62820]: DEBUG nova.scheduler.client.report [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1301.294365] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1301.326820] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1301.327137] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1301.327322] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.327519] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1301.327739] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.327899] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1301.328137] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1301.328361] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1301.328571] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1301.328763] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1301.329033] env[62820]: DEBUG nova.virt.hardware [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.330342] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa6d736-7984-4cc9-b18b-c10bb454125e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.339692] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945add97-3ba6-42f2-9b16-d5783c23a007 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.370127] env[62820]: DEBUG oslo_vmware.api [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Task: {'id': task-1695015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249215} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.372656] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1301.372903] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1301.373077] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1301.373237] env[62820]: INFO nova.compute.manager [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Took 1.18 seconds to destroy the instance on the hypervisor. 
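The DEBUG records on either side of this point all follow one driver pattern: nova.virt.vmwareapi issues a vSphere task method through the oslo.vmware session (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) and then blocks in wait_for_task, whose poll loop produces the "progress is N%" and "completed successfully ... duration_secs" lines seen here. A minimal sketch of that invoke-then-poll pattern against the public oslo.vmware API follows; the vCenter host, credentials, retry/poll settings and disk path are illustrative placeholders rather than values from this trace, and the helper is a simplified stand-in for Nova's own vm_util wrappers.

# Sketch of the invoke-then-poll pattern visible in the surrounding records.
# Host, credentials and the disk path are placeholders, not from this trace.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

def get_session(host, user, password):
    # Positional arguments: host, username, password, api_retry_count,
    # task_poll_interval; the poll interval drives the repeated
    # "progress is N%" DEBUG lines emitted while a task is running.
    return vmware_api.VMwareAPISession(host, user, password, 10, 0.5)

def extend_root_disk(session, vmdk_path, new_size_kb, dc_ref):
    """Issue ExtendVirtualDisk_Task and wait for vCenter to finish it."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=vmdk_path, datacenter=dc_ref,
        newCapacityKb=new_size_kb, eagerZero=False)
    try:
        # wait_for_task polls the task object (logging progress) and raises
        # an oslo_vmware exception if vCenter reports an error state.
        return session.wait_for_task(task)
    except vmware_exc.VimException:
        raise  # the real driver logs and cleans up before re-raising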
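Similarly, the "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... "released" ... held N.NNNs" lines throughout this section are emitted by oslo.concurrency's lockutils instrumentation around the semaphores Nova takes per instance UUID ("<uuid>-events", "refresh_cache-<uuid>") and around the resource tracker's "compute_resources" lock. The short sketch below shows how such locks are taken; the lock names and function bodies are placeholders for illustration, not code lifted from Nova.

# Sketch of the oslo.concurrency locking that produces the
# "Acquiring lock / acquired ... waited / released ... held" DEBUG lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Runs with the "compute_resources" semaphore held; the waited/held
    # durations in the log are measured around this function body.
    return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}

def refresh_network_cache(instance_uuid):
    # The refresh_cache-<uuid> locks follow the same pattern, taken as an
    # explicit context manager instead of a decorator.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance_info_cache under the lock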
[ 1301.373482] env[62820]: DEBUG oslo.service.loopingcall [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1301.374022] env[62820]: DEBUG nova.compute.manager [-] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1301.374130] env[62820]: DEBUG nova.network.neutron [-] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1301.381380] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52df8f8f-c4f4-ae1d-d817-97cc5f15de75, 'name': SearchDatastore_Task, 'duration_secs': 0.022598} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.384989] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33fa75e8-e664-4100-9991-aa820571bce0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.393543] env[62820]: DEBUG oslo_vmware.api [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695016, 'name': PowerOnVM_Task, 'duration_secs': 0.512117} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.393543] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.393725] env[62820]: INFO nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Took 4.99 seconds to spawn the instance on the hypervisor. [ 1301.399275] env[62820]: DEBUG nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1301.399275] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1301.399275] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522aa765-ce72-6816-0493-16cbd011141e" [ 1301.399275] env[62820]: _type = "Task" [ 1301.399275] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.399275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b72cea-8003-4c32-90f9-fec5f485d339 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.410768] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522aa765-ce72-6816-0493-16cbd011141e, 'name': SearchDatastore_Task, 'duration_secs': 0.012332} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.410768] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.410768] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1301.410768] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47569134-8fc4-40bc-9d35-1bcd662974f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.417078] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1301.417078] env[62820]: value = "task-1695018" [ 1301.417078] env[62820]: _type = "Task" [ 1301.417078] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.425353] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695018, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.600540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.604024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.604024] env[62820]: DEBUG nova.network.neutron [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1301.725716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.726332] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1301.732478] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.141s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.734144] env[62820]: INFO nova.compute.claims [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1301.924988] env[62820]: INFO nova.compute.manager [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Took 10.48 seconds to build instance. [ 1301.932925] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695018, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.974987] env[62820]: DEBUG nova.compute.manager [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1302.188070] env[62820]: DEBUG nova.network.neutron [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1302.239388] env[62820]: DEBUG nova.compute.utils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1302.240966] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1302.241269] env[62820]: DEBUG nova.network.neutron [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1302.279555] env[62820]: DEBUG nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Received event network-changed-7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1302.279679] env[62820]: DEBUG nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Refreshing instance network info cache due to event network-changed-7f1b810c-dc19-4971-a532-bdac241941cf. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1302.280024] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Acquiring lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.280115] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Acquired lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.280355] env[62820]: DEBUG nova.network.neutron [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Refreshing network info cache for port 7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1302.430389] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e72adfe3-d3c8-491e-bb22-64d9a0453540 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.999s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.431234] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600515} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.432760] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1302.433148] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1302.433497] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40614727-164f-497e-9f21-9ffca927c4d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.443266] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1302.443266] env[62820]: value = "task-1695019" [ 1302.443266] env[62820]: _type = "Task" [ 1302.443266] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.443266] env[62820]: DEBUG nova.policy [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1b1df2d99e84225bd883b4548870699', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9abf5f08d97547138971101636e32adc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1302.457402] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695019, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.467635] env[62820]: DEBUG nova.network.neutron [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Updating instance_info_cache with network_info: [{"id": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "address": "fa:16:3e:ca:e2:f6", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a487d2d-4f", "ovs_interfaceid": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.498050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.750793] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1302.768439] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.768677] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.784651] env[62820]: DEBUG nova.network.neutron [-] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.936697] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1302.957333] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695019, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123899} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.957680] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1302.958666] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a264e3c-c27e-4c30-98b1-c43664d4f73c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.976764] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.977107] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Instance network_info: |[{"id": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "address": "fa:16:3e:ca:e2:f6", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a487d2d-4f", "ovs_interfaceid": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1302.986876] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1302.990620] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:e2:f6', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a487d2d-4f2d-43bd-9691-dd7219d7b997', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1302.999160] env[62820]: DEBUG oslo.service.loopingcall [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1302.999160] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1f528ab-7e6f-4487-b65d-b0adaf27bc2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.012977] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1303.013243] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3486355e-0b28-4df4-8b9e-450bb480233b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.034210] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1303.034210] env[62820]: value = "task-1695020" [ 1303.034210] env[62820]: _type = "Task" [ 1303.034210] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.041242] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1303.041242] env[62820]: value = "task-1695021" [ 1303.041242] env[62820]: _type = "Task" [ 1303.041242] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.051454] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695020, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.056736] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695021, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.099170] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198e097d-cba2-4ec0-b76a-0ae58fb10882 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.108676] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c1a249-ed00-4519-85ab-3a7d577c4944 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.145479] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "42d00bd3-71fa-4c26-a544-489326163d88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.146050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "42d00bd3-71fa-4c26-a544-489326163d88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.147543] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b8c83d-c074-4514-b4bd-fcd2e600a917 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.159740] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a191951c-ce44-4063-b52f-4b9d166f4a19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.180225] env[62820]: DEBUG nova.compute.provider_tree [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.287803] env[62820]: INFO nova.compute.manager [-] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Took 1.91 seconds to deallocate network for instance. [ 1303.387203] env[62820]: DEBUG nova.network.neutron [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Updated VIF entry in instance network info cache for port 7f1b810c-dc19-4971-a532-bdac241941cf. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1303.387647] env[62820]: DEBUG nova.network.neutron [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Updating instance_info_cache with network_info: [{"id": "7f1b810c-dc19-4971-a532-bdac241941cf", "address": "fa:16:3e:05:4b:bd", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f1b810c-dc", "ovs_interfaceid": "7f1b810c-dc19-4971-a532-bdac241941cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.458862] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.547840] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695020, 'name': ReconfigVM_Task, 'duration_secs': 0.3804} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.551114] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.551771] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e5eeb56-7fe4-474e-936a-8601251bd82a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.564842] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695021, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.564842] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1303.564842] env[62820]: value = "task-1695023" [ 1303.564842] env[62820]: _type = "Task" [ 1303.564842] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.576426] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695023, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.588214] env[62820]: DEBUG nova.network.neutron [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Successfully created port: cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.684210] env[62820]: DEBUG nova.scheduler.client.report [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1303.763175] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1303.798723] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.804592] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1303.804592] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1303.804821] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1303.807442] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1303.807442] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1303.807969] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1303.807969] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1303.808107] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1303.808206] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1303.808380] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1303.808538] env[62820]: DEBUG nova.virt.hardware [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1303.809627] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a36e811-b7e4-4881-8cfb-19b94bda116d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.821925] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f898e334-7959-4283-8603-b269b4d5b35a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.890628] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Releasing lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.891175] env[62820]: DEBUG nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Received event network-vif-plugged-4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1303.891175] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Acquiring lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.891387] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.891589] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf 
req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.891740] env[62820]: DEBUG nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] No waiting events found dispatching network-vif-plugged-4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1303.893331] env[62820]: WARNING nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Received unexpected event network-vif-plugged-4a487d2d-4f2d-43bd-9691-dd7219d7b997 for instance with vm_state building and task_state spawning. [ 1303.893331] env[62820]: DEBUG nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Received event network-changed-4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1303.893331] env[62820]: DEBUG nova.compute.manager [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Refreshing instance network info cache due to event network-changed-4a487d2d-4f2d-43bd-9691-dd7219d7b997. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1303.893331] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Acquiring lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.893331] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Acquired lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.894584] env[62820]: DEBUG nova.network.neutron [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Refreshing network info cache for port 4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1304.058375] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695021, 'name': CreateVM_Task, 'duration_secs': 0.530852} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.058631] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1304.060068] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.060068] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.060201] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1304.060415] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02c986e8-a0eb-4c97-9a4a-a26cc6f53491 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.065773] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1304.065773] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520207fd-b53f-6e96-f433-733fa9d4521c" [ 1304.065773] env[62820]: _type = "Task" [ 1304.065773] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.080385] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520207fd-b53f-6e96-f433-733fa9d4521c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.080667] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695023, 'name': Rename_Task, 'duration_secs': 0.169651} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.081030] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1304.081180] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e882e95d-1599-442b-a940-102c40b193eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.088489] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1304.088489] env[62820]: value = "task-1695024" [ 1304.088489] env[62820]: _type = "Task" [ 1304.088489] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.097223] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695024, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.195023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.195023] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1304.196486] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.212s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.199084] env[62820]: INFO nova.compute.claims [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.371620] env[62820]: DEBUG nova.network.neutron [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Successfully updated port: 262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.581889] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520207fd-b53f-6e96-f433-733fa9d4521c, 'name': SearchDatastore_Task, 'duration_secs': 0.011619} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.582298] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.582721] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1304.582843] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.582959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.583513] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 
tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1304.583513] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76f07463-b299-4fce-9d0b-e49af5bcf171 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.599603] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695024, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.601269] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1304.601510] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1304.602766] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12c93911-7d96-43ef-8eaf-13e43def50e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.609163] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1304.609163] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52449082-ab16-a439-dc3b-408d21d8db5c" [ 1304.609163] env[62820]: _type = "Task" [ 1304.609163] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.617841] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52449082-ab16-a439-dc3b-408d21d8db5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.706667] env[62820]: DEBUG nova.compute.utils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1304.713332] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1304.713511] env[62820]: DEBUG nova.network.neutron [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1304.879041] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.879910] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquired lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.879910] env[62820]: DEBUG nova.network.neutron [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1304.962961] env[62820]: DEBUG nova.policy [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edbfacd43d234e2da658d1a576a92b15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3a21fa99dc4b4c93f73109aafadde6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1305.106678] env[62820]: DEBUG oslo_vmware.api [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695024, 'name': PowerOnVM_Task, 'duration_secs': 0.584824} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.107138] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1305.107253] env[62820]: INFO nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Took 11.18 seconds to spawn the instance on the hypervisor. 
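The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task and CreateVM_Task entries above all follow the same oslo.vmware task-polling pattern: the driver submits a vCenter task, then _poll_task re-reads the task state on an interval, logging the reported progress until the task ends in success or error. The sketch below is illustrative only; it assumes a caller-supplied get_task_info() returning a TaskInfo-like object (fields state, progress, key, name, result, error) and is not the actual oslo.vmware or Nova code referenced in the log.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Illustrative sketch of the polling loop behind the repeated
        # "Task: {...} progress is N%" log entries. get_task_info is an
        # assumed helper that fetches the current TaskInfo for one task.
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                print('Task %s (%s) progress is %s%%'
                      % (info.key, info.name, info.progress or 0))
                time.sleep(poll_interval)
            elif info.state == 'success':
                return info.result
            else:
                # vCenter reports 'error'; surface it the way a driver
                # would raise a task failure to its caller.
                raise RuntimeError(str(info.error))

Once the loop returns, the caller records the completed entry with its 'duration_secs', as seen for the numbered tasks above.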
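Equally pervasive in this section are the oslo.concurrency lock traces: each "Acquiring lock", "acquired ... waited Ns" and '"released" ... held Ns' triple is emitted by lockutils around a named in-process semaphore, whether it is the shared "compute_resources" lock serializing instance_claim/resize_claim/update_usage or a per-instance "refresh_cache-<uuid>" lock guarding the network info cache. A minimal usage sketch, assuming plain lockutils calls rather than Nova's own synchronized wrappers, with placeholder function bodies:

    from oslo_concurrency import lockutils

    # Decorator form: all claims against one compute node's inventory are
    # serialized under a single lock name, like "compute_resources" above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance, resources):
        resources['vcpus_used'] += 1  # placeholder for the real accounting

    # Context-manager form: mirrors the Acquiring/Acquired/Releasing lock
    # entries around "refresh_cache-<instance uuid>" in the log.
    def refresh_network_cache(instance_uuid, refresh_fn):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)

The waited/held durations in the log are simply the time spent blocking on, and then the time spent inside, these guarded sections.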
[ 1305.107444] env[62820]: DEBUG nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1305.108220] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17edfa5a-c959-4b58-ba68-3f2060311c75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.121836] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52449082-ab16-a439-dc3b-408d21d8db5c, 'name': SearchDatastore_Task, 'duration_secs': 0.013061} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.128232] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41045b89-e80b-4423-9ce4-07b8533600e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.133974] env[62820]: DEBUG nova.compute.manager [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Received event network-vif-deleted-ddb6608b-05dd-480c-9e52-01ba94622f69 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1305.135275] env[62820]: DEBUG nova.compute.manager [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Received event network-vif-plugged-262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1305.136629] env[62820]: DEBUG oslo_concurrency.lockutils [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.136629] env[62820]: DEBUG oslo_concurrency.lockutils [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.137644] env[62820]: DEBUG oslo_concurrency.lockutils [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.137946] env[62820]: DEBUG nova.compute.manager [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] No waiting events found dispatching 
network-vif-plugged-262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1305.138144] env[62820]: WARNING nova.compute.manager [req-e957fbe4-bdfc-4b7f-83b7-8dc45014236c req-79187483-9bb6-45c0-b09c-4237838e727b service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Received unexpected event network-vif-plugged-262a6e93-a27f-4189-9a88-cb1c5fe97709 for instance with vm_state building and task_state spawning. [ 1305.142569] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1305.142569] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5206f7c2-d84b-5e4f-8409-27cdbf8b2e65" [ 1305.142569] env[62820]: _type = "Task" [ 1305.142569] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.152348] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5206f7c2-d84b-5e4f-8409-27cdbf8b2e65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.214310] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1305.445268] env[62820]: DEBUG nova.network.neutron [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.570344] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d4d0bc-b96c-4fd4-9ba3-df29242f6a45 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.580811] env[62820]: INFO nova.compute.manager [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Rebuilding instance [ 1305.583881] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab6b1b1-6e05-48c8-9e1d-cc15c498c6e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.630344] env[62820]: DEBUG nova.network.neutron [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Updated VIF entry in instance network info cache for port 4a487d2d-4f2d-43bd-9691-dd7219d7b997. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.631624] env[62820]: DEBUG nova.network.neutron [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Updating instance_info_cache with network_info: [{"id": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "address": "fa:16:3e:ca:e2:f6", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a487d2d-4f", "ovs_interfaceid": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.636528] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6457c105-e57f-490e-a742-8b651222aad5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.650176] env[62820]: INFO nova.compute.manager [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Took 16.00 seconds to build instance. [ 1305.658986] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b7d18d-af8e-40a9-a5a7-3b37b69ca2e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.670495] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5206f7c2-d84b-5e4f-8409-27cdbf8b2e65, 'name': SearchDatastore_Task, 'duration_secs': 0.028584} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.677144] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.677144] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86/0ed6ab62-6ae1-4b1a-be2e-a2312334fd86.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1305.677144] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa888b94-a392-44f4-91c4-288ead767ec9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.685685] env[62820]: DEBUG nova.compute.provider_tree [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.693395] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1305.693395] env[62820]: value = "task-1695025" [ 1305.693395] env[62820]: _type = "Task" [ 1305.693395] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.707462] env[62820]: DEBUG nova.compute.manager [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1305.712354] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0cae64-8277-412b-a453-32c4336d0c60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.725397] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695025, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.144463] env[62820]: DEBUG oslo_concurrency.lockutils [req-da25dfdd-0066-4391-bb6a-ce2ebfb0eabf req-60175a84-4d19-470c-8f8a-cca8a1a8f95f service nova] Releasing lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.154529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0281eb64-e38c-4eff-9bb1-5d4cce1b93ec tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.521s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.190524] env[62820]: DEBUG nova.scheduler.client.report [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1306.207542] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695025, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.226991] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1306.237323] env[62820]: DEBUG nova.network.neutron [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updating instance_info_cache with network_info: [{"id": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "address": "fa:16:3e:68:b2:c4", "network": {"id": "a059b348-19da-48d1-baa7-5bf2e657f086", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1831702168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7518fa7b0f743ccaa0a14aee92b88fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262a6e93-a2", "ovs_interfaceid": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.266213] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1306.266466] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1306.266622] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.266802] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 
tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1306.267089] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.267850] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1306.268678] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1306.268678] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1306.272291] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1306.272633] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1306.272752] env[62820]: DEBUG nova.virt.hardware [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1306.276860] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef5bb10-86aa-4ee5-820f-891cd81b617b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.287381] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f5334a-f892-49f6-83c0-31d5e6184b4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.594497] env[62820]: DEBUG nova.network.neutron [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Successfully created port: 
c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1306.658728] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1306.695688] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.696323] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1306.699075] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.049s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.703410] env[62820]: INFO nova.compute.claims [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1306.716230] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695025, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.741615] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Releasing lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.741945] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Instance network_info: |[{"id": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "address": "fa:16:3e:68:b2:c4", "network": {"id": "a059b348-19da-48d1-baa7-5bf2e657f086", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1831702168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7518fa7b0f743ccaa0a14aee92b88fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262a6e93-a2", "ovs_interfaceid": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1306.742579] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1306.743323] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:b2:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c7eaa8-06f3-40c3-93ae-7593486eb870', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '262a6e93-a27f-4189-9a88-cb1c5fe97709', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.752166] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Creating folder: Project (c7518fa7b0f743ccaa0a14aee92b88fd). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.752524] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbc3d6f7-6ffc-413b-9537-130ce9efacc9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.756543] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a7c7ead-7b05-4c9c-a64b-8f3f241c40a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.763843] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1306.763843] env[62820]: value = "task-1695027" [ 1306.763843] env[62820]: _type = "Task" [ 1306.763843] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.769412] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Created folder: Project (c7518fa7b0f743ccaa0a14aee92b88fd) in parent group-v353379. [ 1306.769641] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Creating folder: Instances. Parent ref: group-v353397. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.770231] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e382103-a4ad-4ac0-ab40-0969e0405804 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.774973] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695027, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.781842] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Created folder: Instances in parent group-v353397. [ 1306.782116] env[62820]: DEBUG oslo.service.loopingcall [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.782306] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.782575] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca337920-fb5b-42c5-be2b-eacab9b99e6c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.801347] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.801347] env[62820]: value = "task-1695029" [ 1306.801347] env[62820]: _type = "Task" [ 1306.801347] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.810792] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695029, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.862471] env[62820]: DEBUG nova.network.neutron [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Successfully updated port: cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1307.196466] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.206905] env[62820]: DEBUG nova.compute.utils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1307.216903] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1307.216903] env[62820]: DEBUG nova.network.neutron [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.226588] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695025, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.282845] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695027, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.319438] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695029, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.366289] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "refresh_cache-b3d1f811-1d28-40f7-8bf8-c29eb64896c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.366289] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquired lock "refresh_cache-b3d1f811-1d28-40f7-8bf8-c29eb64896c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.366289] env[62820]: DEBUG nova.network.neutron [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1307.511509] env[62820]: DEBUG nova.policy [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9220b3befd9641719c49a131cb86db41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f59ab047666940c6bcb633a221194395', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1307.573054] env[62820]: DEBUG nova.compute.manager [req-a4fcdbb2-1d0e-4428-939e-8e5f854c7db7 req-5f8a3587-1351-424b-ad72-913a5d63f635 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Received event network-vif-plugged-cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1307.573054] env[62820]: DEBUG oslo_concurrency.lockutils [req-a4fcdbb2-1d0e-4428-939e-8e5f854c7db7 req-5f8a3587-1351-424b-ad72-913a5d63f635 service nova] Acquiring lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.575148] env[62820]: DEBUG oslo_concurrency.lockutils [req-a4fcdbb2-1d0e-4428-939e-8e5f854c7db7 req-5f8a3587-1351-424b-ad72-913a5d63f635 service nova] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.575373] env[62820]: DEBUG oslo_concurrency.lockutils [req-a4fcdbb2-1d0e-4428-939e-8e5f854c7db7 req-5f8a3587-1351-424b-ad72-913a5d63f635 service nova] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.575578] env[62820]: DEBUG nova.compute.manager [req-a4fcdbb2-1d0e-4428-939e-8e5f854c7db7 req-5f8a3587-1351-424b-ad72-913a5d63f635 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] No waiting events found dispatching network-vif-plugged-cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1307.575857] env[62820]: WARNING nova.compute.manager [req-a4fcdbb2-1d0e-4428-939e-8e5f854c7db7 req-5f8a3587-1351-424b-ad72-913a5d63f635 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Received unexpected event network-vif-plugged-cc33796e-572d-47c4-99e7-77f5cff4a281 for instance with vm_state building and task_state spawning. [ 1307.715450] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1307.730164] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695025, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.710547} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.730291] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86/0ed6ab62-6ae1-4b1a-be2e-a2312334fd86.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1307.732697] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1307.732697] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5ce3052-5d3b-4c7b-aed9-83cae3200d44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.742549] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1307.742549] env[62820]: value = "task-1695031" [ 1307.742549] env[62820]: _type = "Task" [ 1307.742549] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.757600] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695031, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.777501] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695027, 'name': PowerOffVM_Task, 'duration_secs': 0.830759} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.777822] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1307.778105] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1307.778974] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30846ced-2326-4ead-96d3-cca4e61aabd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.794453] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1307.794842] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94463719-0344-4f11-a0d2-01ba5f959f9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.814958] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695029, 'name': CreateVM_Task, 'duration_secs': 0.983958} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.815143] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1307.815846] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.815999] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.816327] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1307.816593] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56b8d295-fe09-4d83-9a91-ea9a422be79c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.823434] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1307.823637] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1307.823807] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Deleting the datastore file [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1307.824086] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c6b8acc-a9ce-48ae-b838-ef921c4a3f5f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.829404] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1307.829404] env[62820]: value = 
"session[5263da33-e147-45e9-71e6-fd449b37f057]52d3e7ec-4ce8-ddf9-4d17-18039ed161db" [ 1307.829404] env[62820]: _type = "Task" [ 1307.829404] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.834420] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1307.834420] env[62820]: value = "task-1695033" [ 1307.834420] env[62820]: _type = "Task" [ 1307.834420] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.841683] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d3e7ec-4ce8-ddf9-4d17-18039ed161db, 'name': SearchDatastore_Task, 'duration_secs': 0.009508} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.842604] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.842878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.843342] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.843535] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.843720] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.846865] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-096d5b3b-678c-484b-beb9-81e92b66a5eb {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.849730] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.855609] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.855609] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.855838] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8490c4f1-ef42-4160-b871-067740127a3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.866219] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1307.866219] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525ffc90-9bc1-d226-7ada-411a7708fe3e" [ 1307.866219] env[62820]: _type = "Task" [ 1307.866219] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.877628] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525ffc90-9bc1-d226-7ada-411a7708fe3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.967165] env[62820]: DEBUG nova.network.neutron [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1308.089922] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c343c9-2848-42f5-9c81-ef013758acc9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.098629] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb424f4d-361e-4b7e-b33b-99fc7b883e89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.137141] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6110ed26-562f-427b-804a-5cd72ccf5c65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.143665] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3e90c2-4a8f-4d22-83ba-8f625d7be4be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.161560] env[62820]: DEBUG nova.compute.provider_tree [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.255220] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695031, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071048} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.255220] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.255512] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b600fca0-f435-4c66-9390-c09071fc4719 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.283537] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86/0ed6ab62-6ae1-4b1a-be2e-a2312334fd86.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.285029] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36c986f4-4ddc-40c1-a473-12d9d322b55f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.305923] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1308.305923] env[62820]: value = "task-1695034" [ 1308.305923] env[62820]: _type = "Task" [ 1308.305923] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.315534] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.344378] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108379} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.345726] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1308.345726] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1308.345726] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1308.385237] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525ffc90-9bc1-d226-7ada-411a7708fe3e, 'name': SearchDatastore_Task, 'duration_secs': 0.008484} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.385237] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e083cb19-8517-425e-983a-e445dd0ad714 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.390899] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1308.390899] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526604f4-36aa-a0a9-5db7-26cbf5505fc6" [ 1308.390899] env[62820]: _type = "Task" [ 1308.390899] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.400950] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526604f4-36aa-a0a9-5db7-26cbf5505fc6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.486674] env[62820]: DEBUG nova.network.neutron [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Updating instance_info_cache with network_info: [{"id": "cc33796e-572d-47c4-99e7-77f5cff4a281", "address": "fa:16:3e:0e:97:82", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc33796e-57", "ovs_interfaceid": "cc33796e-572d-47c4-99e7-77f5cff4a281", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.665063] env[62820]: DEBUG nova.scheduler.client.report [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1308.685815] env[62820]: DEBUG nova.compute.manager [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Received event network-changed-262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1308.686071] env[62820]: DEBUG nova.compute.manager [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Refreshing instance network info cache due to event network-changed-262a6e93-a27f-4189-9a88-cb1c5fe97709. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1308.690214] env[62820]: DEBUG oslo_concurrency.lockutils [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] Acquiring lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.690505] env[62820]: DEBUG oslo_concurrency.lockutils [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] Acquired lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.691171] env[62820]: DEBUG nova.network.neutron [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Refreshing network info cache for port 262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.734199] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1308.739944] env[62820]: DEBUG nova.network.neutron [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Successfully created port: 0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1308.773215] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:47:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1026776586',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-2077658260',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1308.773473] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1308.773618] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e 
tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1308.773835] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1308.773989] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1308.774150] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1308.774359] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1308.774518] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1308.774906] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1308.774974] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1308.775167] env[62820]: DEBUG nova.virt.hardware [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1308.776050] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c497128-0d67-470e-88be-b58d6660c49c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.788426] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
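The nova.virt.hardware trace above walks from flavor and image limits of 0:0:0 through the preferred topology down to a single VirtCPUTopology(cores=1,sockets=1,threads=1) for the one-vCPU flavor. A simplified, self-contained enumeration in the same spirit (not Nova's actual implementation):

    # Simplified sketch of topology enumeration: every (sockets, cores, threads)
    # triple whose product equals the vCPU count, capped by the 65536 limits above.
    from dataclasses import dataclass
    from itertools import product

    @dataclass(frozen=True)
    class CPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        caps = (min(vcpus, max_sockets), min(vcpus, max_cores), min(vcpus, max_threads))
        return [CPUTopology(s, c, t)
                for s, c, t in product(*(range(1, cap + 1) for cap in caps))
                if s * c * t == vcpus]

    # For the 1-vCPU flavor this yields exactly [CPUTopology(sockets=1, cores=1, threads=1)].
    print(possible_topologies(1))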
opID=oslo.vmware-d738da51-930a-4248-9133-b12c8e4ed97d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.821969] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695034, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.904253] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526604f4-36aa-a0a9-5db7-26cbf5505fc6, 'name': SearchDatastore_Task, 'duration_secs': 0.010277} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.905139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.905139] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 90ea0c16-739a-4132-ac36-e154a846b9c2/90ea0c16-739a-4132-ac36-e154a846b9c2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.905356] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09f7e25f-ae78-45c3-ace9-a1ab1c6b6ee4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.913300] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1308.913300] env[62820]: value = "task-1695035" [ 1308.913300] env[62820]: _type = "Task" [ 1308.913300] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.923167] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695035, 'name': CopyVirtualDisk_Task} progress is 0%. 
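The CopyVirtualDisk_Task above always copies from the shared image-cache directory to a per-instance directory on the same datastore. Two illustrative path builders reproduce the shapes seen in the log; the helpers are hypothetical, since Nova assembles these paths through its ds_util utilities.

    # Illustrative builders for the two datastore paths in the copy above:
    #   [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk
    #   [datastore1] <instance-uuid>/<instance-uuid>.vmdk
    def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
        return "[%s] %s/%s/%s.vmdk" % (datastore, cache_dir, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    src = cached_image_path("datastore1", "b17619ac-779a-4463-ab94-4bb0b9ba63c1")
    dst = instance_disk_path("datastore1", "90ea0c16-739a-4132-ac36-e154a846b9c2")
    print(src, "->", dst)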
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.993022] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Releasing lock "refresh_cache-b3d1f811-1d28-40f7-8bf8-c29eb64896c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.993022] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Instance network_info: |[{"id": "cc33796e-572d-47c4-99e7-77f5cff4a281", "address": "fa:16:3e:0e:97:82", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc33796e-57", "ovs_interfaceid": "cc33796e-572d-47c4-99e7-77f5cff4a281", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1308.993311] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:97:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc33796e-572d-47c4-99e7-77f5cff4a281', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1308.999496] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Creating folder: Project (9abf5f08d97547138971101636e32adc). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.001194] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-670bd96f-8fc7-42b6-a2e2-96d5349b47dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.013113] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Created folder: Project (9abf5f08d97547138971101636e32adc) in parent group-v353379. 
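The vmops record above condenses the full Neutron network_info entry into the compact "Instance VIF info" structure handed to vm_util when the VM is built. A sketch of that mapping using the same fields shown in the log; the function name is illustrative.

    # Sketch of the network_info -> "Instance VIF info" translation, field for field
    # as it appears in the log entry above (OpaqueNetwork / nsx.LogicalSwitch case).
    def vif_info_from_network_info(entry, vif_model="vmxnet3"):
        details = entry["details"]
        return {
            "network_name": entry["network"]["bridge"],            # "br-int"
            "mac_address": entry["address"],                        # "fa:16:3e:0e:97:82"
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": entry["id"],
            "vif_model": vif_model,
        }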
[ 1309.013113] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Creating folder: Instances. Parent ref: group-v353400. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.013113] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27a6ff94-1d3e-43bf-a6ce-6e0a9e40ad96 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.021972] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Created folder: Instances in parent group-v353400. [ 1309.022575] env[62820]: DEBUG oslo.service.loopingcall [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.022923] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1309.024020] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b722952-fe3b-4453-8883-6e650cdcb0a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.046140] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.046140] env[62820]: value = "task-1695038" [ 1309.046140] env[62820]: _type = "Task" [ 1309.046140] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.056154] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695038, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.171209] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.171875] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Start building networks asynchronously for instance. 
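The oslo.vmware blocks above ("Waiting for the task: ... to complete", followed by repeated "progress is N%" polls) are a plain poll-until-terminal loop around the vCenter task object. A generic sketch of such a loop; get_task_info is an assumed callable and the state strings are illustrative, not the oslo.vmware API.

    # Generic polling sketch of the wait_for_task pattern in the log above.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()            # e.g. {'state': 'running', 'progress': 42}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %ss" % timeout)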
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1309.176961] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.505s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.178888] env[62820]: INFO nova.compute.claims [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1309.336904] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695034, 'name': ReconfigVM_Task, 'duration_secs': 0.548057} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.336904] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86/0ed6ab62-6ae1-4b1a-be2e-a2312334fd86.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1309.336904] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de50ca9c-dc5d-4c21-9fd0-49447a3ea017 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.343635] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1309.343635] env[62820]: value = "task-1695039" [ 1309.343635] env[62820]: _type = "Task" [ 1309.343635] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.360851] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695039, 'name': Rename_Task} progress is 5%. 
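A claim like the one reported successful above has to fit within the provider inventory published earlier (VCPU with allocation_ratio 4.0, MEMORY_MB with 512 reserved, and so on). A toy capacity check in that spirit; the formula (total - reserved) * allocation_ratio follows placement's notion of usable capacity and is shown only as an illustration, not the resource tracker's code.

    # Toy capacity check: a request fits if current usage plus the request stays
    # under (total - reserved) * allocation_ratio for the resource class.
    def fits(inv, used, requested):
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        return used + requested <= capacity

    vcpu = {"total": 48, "reserved": 0, "allocation_ratio": 4.0}          # from the report above
    memory_mb = {"total": 196590, "reserved": 512, "allocation_ratio": 1.0}
    print(fits(vcpu, used=10, requested=1), fits(memory_mb, used=4096, requested=192))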
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.393311] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1309.393594] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1309.393799] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1309.394410] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1309.394691] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1309.394896] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1309.395130] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1309.395299] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1309.395472] env[62820]: DEBUG nova.virt.hardware [None 
req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1309.395707] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1309.395900] env[62820]: DEBUG nova.virt.hardware [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1309.396836] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc30643b-94d2-4203-b988-a08660ed245e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.410063] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ec69cc-cb0c-4b48-a7cc-bc6bab34a8fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.426601] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695035, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484427} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.433455] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 90ea0c16-739a-4132-ac36-e154a846b9c2/90ea0c16-739a-4132-ac36-e154a846b9c2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1309.433686] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1309.434176] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1309.439890] env[62820]: DEBUG oslo.service.loopingcall [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.440710] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80d1dc02-f2f0-49c8-b2c7-18e4ace2d86f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.442312] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1309.442643] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a29cd8fc-2885-43b4-beab-28dde049daf9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.459427] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.459697] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.464744] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1309.464744] env[62820]: value = "task-1695041" [ 1309.464744] env[62820]: _type = "Task" [ 1309.464744] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.464744] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.464744] env[62820]: value = "task-1695042" [ 1309.464744] env[62820]: _type = "Task" [ 1309.464744] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.476730] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695042, 'name': CreateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.480255] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.561901] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695038, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.678840] env[62820]: DEBUG nova.compute.utils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1309.679656] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1309.679903] env[62820]: DEBUG nova.network.neutron [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1309.761948] env[62820]: DEBUG nova.network.neutron [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Successfully updated port: c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.855928] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695039, 'name': Rename_Task, 'duration_secs': 0.230168} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.856413] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1309.856954] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0eee9a1-bd92-401b-8134-73aec71718e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.865900] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1309.865900] env[62820]: value = "task-1695043" [ 1309.865900] env[62820]: _type = "Task" [ 1309.865900] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.876499] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695043, 'name': PowerOnVM_Task} progress is 0%. 
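The compute.utils line above ("Using /dev/sd instead of None") falls back to the /dev/sd prefix when the boot request supplied no device name, then picks the next free letter. A hypothetical version of that next-name logic, not nova.compute.utils itself:

    # Illustrative next-device-name helper: default to the /dev/sd prefix and
    # return the first letter not already attached to the instance.
    import string

    def next_device_name(existing, prefix="/dev/sd"):
        used = {name[len(prefix):] for name in existing if name.startswith(prefix)}
        for letter in string.ascii_lowercase:
            if letter not in used:
                return prefix + letter
        raise ValueError("no free device names under %s" % prefix)

    print(next_device_name(["/dev/sda"]))   # -> /dev/sdb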
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.884460] env[62820]: DEBUG nova.policy [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '686e11cbfa7948038f81cbd941167c3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd853b4ff56534a10a13bf7e5becf7d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1309.981064] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094589} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.984358] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1309.984984] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695042, 'name': CreateVM_Task, 'duration_secs': 0.412613} completed successfully. 
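The nova.policy line above shows the network:attach_external_network check failing for a token that carries only the member and reader roles. A stripped-down illustration of that decision; the admin-only rule is an assumption made for the example and is not read from the deployment's actual policy files.

    # Minimal illustration (not oslo.policy) of a role-based check that rejects
    # the member/reader credentials shown in the log above.
    def check_attach_external_network(credentials, required_roles=frozenset({"admin"})):
        return bool(required_roles & set(credentials.get("roles", [])))

    creds = {"roles": ["member", "reader"],
             "project_id": "d853b4ff56534a10a13bf7e5becf7d0c"}
    print(check_attach_external_network(creds))   # False, matching the "failed" log line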
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.985716] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e8935c-1a11-4878-8435-882b29fb8acc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.988158] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1309.988827] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.988827] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.989026] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1309.989686] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a12e1f5-ff8d-4780-afc1-a22297456377 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.008786] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 90ea0c16-739a-4132-ac36-e154a846b9c2/90ea0c16-739a-4132-ac36-e154a846b9c2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1310.009586] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52e36dd8-2270-4aaf-924a-888ca10f00f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.028433] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1310.028433] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5249b205-4716-80fd-ec56-4452ea22bcad" [ 1310.028433] env[62820]: _type = "Task" [ 1310.028433] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.034063] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1310.034063] env[62820]: value = "task-1695044" [ 1310.034063] env[62820]: _type = "Task" [ 1310.034063] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.040315] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5249b205-4716-80fd-ec56-4452ea22bcad, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.040587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.040806] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.042107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.042107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.042107] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1310.042107] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c712d22a-5302-456c-94aa-77efaa8de953 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.046727] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695044, 'name': 
ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.054822] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695038, 'name': CreateVM_Task, 'duration_secs': 0.531241} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.055149] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1310.055841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.056030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.056397] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1310.056595] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2005be8c-778b-45e4-9482-3e9b4b7d264e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.062226] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1310.062226] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52755dcf-b1cb-1e40-40a2-bc391f3eaa3a" [ 1310.062226] env[62820]: _type = "Task" [ 1310.062226] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.062421] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1310.062627] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1310.065991] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cec955a-1da5-46ea-b196-db21f89b8f71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.073679] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52755dcf-b1cb-1e40-40a2-bc391f3eaa3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.074670] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1310.074670] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a760bd-7407-c42e-d195-314a6ef69d07" [ 1310.074670] env[62820]: _type = "Task" [ 1310.074670] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.082787] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a760bd-7407-c42e-d195-314a6ef69d07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.182891] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1310.265955] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "refresh_cache-043e14a3-df5a-4098-b147-c6460bb85423" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.265955] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquired lock "refresh_cache-043e14a3-df5a-4098-b147-c6460bb85423" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.266318] env[62820]: DEBUG nova.network.neutron [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1310.356305] env[62820]: DEBUG nova.network.neutron [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updated VIF entry in instance network info cache for port 262a6e93-a27f-4189-9a88-cb1c5fe97709. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1310.356808] env[62820]: DEBUG nova.network.neutron [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updating instance_info_cache with network_info: [{"id": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "address": "fa:16:3e:68:b2:c4", "network": {"id": "a059b348-19da-48d1-baa7-5bf2e657f086", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1831702168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7518fa7b0f743ccaa0a14aee92b88fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262a6e93-a2", "ovs_interfaceid": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.379595] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695043, 'name': PowerOnVM_Task} progress is 66%. 
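The neutron cache records above first update the single VIF entry for the changed port and then write the whole list back as the instance_info_cache. A compact sketch of the per-port update step; the helper and the list-of-dicts cache shape are illustrative.

    # Sketch: replace the cached entry whose id matches the changed port, leaving
    # the other VIFs in the instance's network-info list untouched.
    def update_vif_entry(cached_nw_info, new_vif):
        for i, vif in enumerate(cached_nw_info):
            if vif["id"] == new_vif["id"]:
                cached_nw_info[i] = new_vif
                break
        else:
            cached_nw_info.append(new_vif)    # port not cached yet: add it
        return cached_nw_info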
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.549436] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695044, 'name': ReconfigVM_Task, 'duration_secs': 0.310976} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.549653] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 90ea0c16-739a-4132-ac36-e154a846b9c2/90ea0c16-739a-4132-ac36-e154a846b9c2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1310.550370] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40533e80-f474-4e90-a288-6f98631d08fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.563666] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1310.563666] env[62820]: value = "task-1695045" [ 1310.563666] env[62820]: _type = "Task" [ 1310.563666] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.586883] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52755dcf-b1cb-1e40-40a2-bc391f3eaa3a, 'name': SearchDatastore_Task, 'duration_secs': 0.011744} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.594220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.594311] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.594441] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.594675] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695045, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.605587] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a760bd-7407-c42e-d195-314a6ef69d07, 'name': SearchDatastore_Task, 'duration_secs': 0.009825} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.605989] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6026814d-55b7-4119-8082-e3d990c75b4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.612486] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1310.612486] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529466ee-87f1-bbf7-09b4-2621754abc0c" [ 1310.612486] env[62820]: _type = "Task" [ 1310.612486] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.627745] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529466ee-87f1-bbf7-09b4-2621754abc0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.638185] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e20c961-034f-4992-8ec8-3a7c48979fb6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.647919] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18711bf4-afb0-4503-b512-02119a75a9a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.684163] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb980338-bcc8-4968-aa30-166ad009f499 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.694824] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8196803-d669-42ff-b83d-6d36bb450b93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.715942] env[62820]: DEBUG nova.compute.provider_tree [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.862366] env[62820]: DEBUG oslo_concurrency.lockutils [req-189fabb8-ee24-4770-bc0e-2727382b0150 req-94671393-239e-44ec-a8be-9c9eee98db88 service nova] Releasing lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.880231] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695043, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.897642] env[62820]: DEBUG nova.network.neutron [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Instance cache missing network info. 
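Records like "Inventory has not changed in ProviderTree" above come from comparing the freshly computed inventory dict against the cached copy and skipping the placement update when they are equal. A minimal sketch of that guard; the push callable and the plain-dict comparison are assumptions for illustration.

    # Minimal sketch: only push inventory to placement when it differs from the
    # locally cached copy, mirroring the "has not changed" short-circuit above.
    def maybe_update_inventory(cached, new, push):
        if cached == new:
            return False          # nothing to do; log "Inventory has not changed"
        push(new)                 # illustrative callable that PUTs to placement
        return True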
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1310.961079] env[62820]: DEBUG nova.network.neutron [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Successfully created port: 05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1310.983997] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "93098210-ca91-41b4-9b12-96fa105a2ab3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1310.983997] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.081560] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695045, 'name': Rename_Task, 'duration_secs': 0.177166} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.082451] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1311.082638] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f34b3174-e080-450d-94b4-20de63ec8c93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.091879] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1311.091879] env[62820]: value = "task-1695046" [ 1311.091879] env[62820]: _type = "Task" [ 1311.091879] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.102016] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695046, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.129276] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529466ee-87f1-bbf7-09b4-2621754abc0c, 'name': SearchDatastore_Task, 'duration_secs': 0.01153} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.129537] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.129896] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1311.130246] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.130299] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.132402] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4375ca7-d3b3-45f0-afb2-1551d5c7921c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.132851] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1320983f-3602-4bef-b934-b1829fb208fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.142478] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1311.142478] env[62820]: value = "task-1695047" [ 1311.142478] env[62820]: _type = "Task" [ 1311.142478] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.147929] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.148268] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1311.149573] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-199e66e5-33d8-4cca-8e61-c67924bf2f41 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.155937] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.163613] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1311.163613] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ac57a0-e24f-06a4-e22a-82076eb474f0" [ 1311.163613] env[62820]: _type = "Task" [ 1311.163613] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.173597] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ac57a0-e24f-06a4-e22a-82076eb474f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.195413] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1311.219026] env[62820]: DEBUG nova.scheduler.client.report [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1311.240798] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1311.242149] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1311.242319] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.242560] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1311.242988] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.242988] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1311.243170] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1311.243393] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1311.243979] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1311.243979] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1311.244257] env[62820]: DEBUG nova.virt.hardware [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1311.245763] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e12d31-745a-422c-a7f1-1bdf50f9f1b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.256542] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06955e7c-18e6-4fdb-bd0e-26665d27d87a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.378441] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695043, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.604457] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695046, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.656607] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695047, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.659217] env[62820]: DEBUG nova.network.neutron [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Updating instance_info_cache with network_info: [{"id": "c4c9e7df-633c-43a0-bfc5-21a7a40c4a55", "address": "fa:16:3e:b3:e6:6c", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4c9e7df-63", "ovs_interfaceid": "c4c9e7df-633c-43a0-bfc5-21a7a40c4a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.685485] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ac57a0-e24f-06a4-e22a-82076eb474f0, 'name': SearchDatastore_Task, 'duration_secs': 0.023744} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.687876] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ebd1306-3570-436c-a158-b0d40d6640ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.699725] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1311.699725] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52211bb1-3d79-325f-dc67-a8234a8588fa" [ 1311.699725] env[62820]: _type = "Task" [ 1311.699725] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.707465] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52211bb1-3d79-325f-dc67-a8234a8588fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.727231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.727231] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1311.730192] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.951s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.732534] env[62820]: INFO nova.compute.claims [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1311.740028] env[62820]: DEBUG nova.compute.manager [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Received event network-changed-cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1311.740028] env[62820]: DEBUG nova.compute.manager [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Refreshing instance network info cache due to event network-changed-cc33796e-572d-47c4-99e7-77f5cff4a281. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1311.740028] env[62820]: DEBUG oslo_concurrency.lockutils [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] Acquiring lock "refresh_cache-b3d1f811-1d28-40f7-8bf8-c29eb64896c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.740028] env[62820]: DEBUG oslo_concurrency.lockutils [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] Acquired lock "refresh_cache-b3d1f811-1d28-40f7-8bf8-c29eb64896c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.740028] env[62820]: DEBUG nova.network.neutron [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Refreshing network info cache for port cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.881585] env[62820]: DEBUG oslo_vmware.api [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695043, 'name': PowerOnVM_Task, 'duration_secs': 1.52188} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.881942] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1311.883238] env[62820]: INFO nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Took 13.08 seconds to spawn the instance on the hypervisor. [ 1311.883454] env[62820]: DEBUG nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1311.885138] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535b54e1-71e2-486a-bdeb-4452e29fb47b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.028984] env[62820]: DEBUG nova.network.neutron [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Successfully updated port: 0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1312.104491] env[62820]: DEBUG oslo_vmware.api [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695046, 'name': PowerOnVM_Task, 'duration_secs': 0.821969} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.104893] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.106493] env[62820]: INFO nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Took 10.81 seconds to spawn the instance on the hypervisor. [ 1312.107174] env[62820]: DEBUG nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.108838] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921605c5-4727-4cef-9998-4fd0a0c12816 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.158399] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695047, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556557} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.160631] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1312.161881] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1312.162172] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31869489-080c-46c1-9980-7d11bcd6667e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.166084] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Releasing lock "refresh_cache-043e14a3-df5a-4098-b147-c6460bb85423" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.166408] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 
tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Instance network_info: |[{"id": "c4c9e7df-633c-43a0-bfc5-21a7a40c4a55", "address": "fa:16:3e:b3:e6:6c", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4c9e7df-63", "ovs_interfaceid": "c4c9e7df-633c-43a0-bfc5-21a7a40c4a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1312.167248] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:e6:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4c9e7df-633c-43a0-bfc5-21a7a40c4a55', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1312.180127] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Creating folder: Project (1d3a21fa99dc4b4c93f73109aafadde6). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1312.180748] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba98d4bd-520c-4cb5-a7ef-030fe6ec7741 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.188171] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1312.188171] env[62820]: value = "task-1695049" [ 1312.188171] env[62820]: _type = "Task" [ 1312.188171] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.197713] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Created folder: Project (1d3a21fa99dc4b4c93f73109aafadde6) in parent group-v353379. 
[ 1312.197713] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Creating folder: Instances. Parent ref: group-v353404. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1312.197713] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a10b25b9-355a-456f-a39f-a4fe4d0c6640 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.204455] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695049, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.216436] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52211bb1-3d79-325f-dc67-a8234a8588fa, 'name': SearchDatastore_Task, 'duration_secs': 0.013652} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.216710] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.217083] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b3d1f811-1d28-40f7-8bf8-c29eb64896c0/b3d1f811-1d28-40f7-8bf8-c29eb64896c0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1312.219526] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9476d230-d521-42ae-9e20-cfc75aacffe0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.221399] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Created folder: Instances in parent group-v353404. [ 1312.221596] env[62820]: DEBUG oslo.service.loopingcall [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1312.221794] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1312.223531] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e90dc68d-bc3d-48ae-83eb-9a153b0b42c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.245733] env[62820]: DEBUG nova.compute.utils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1312.249020] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1312.249020] env[62820]: value = "task-1695052" [ 1312.249020] env[62820]: _type = "Task" [ 1312.249020] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.250399] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1312.250399] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1312.264969] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1312.264969] env[62820]: value = "task-1695053" [ 1312.264969] env[62820]: _type = "Task" [ 1312.264969] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.276953] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.284284] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695053, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.396847] env[62820]: DEBUG nova.policy [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c606501ec683406cb2106e3a1540315e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a069009bc6a741379effea7b50d9e1c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1312.416587] env[62820]: INFO nova.compute.manager [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Took 20.30 seconds to build instance. [ 1312.535037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.535811] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.535811] env[62820]: DEBUG nova.network.neutron [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1312.638239] env[62820]: INFO nova.compute.manager [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Took 19.99 seconds to build instance. [ 1312.702069] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695049, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080095} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.702069] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1312.702069] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cdf06d-1200-4054-96ca-1e5c56a2b1e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.731964] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1312.731964] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eb02b85-abfc-423e-811f-1dfcf4e578e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.750520] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1312.762561] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1312.762561] env[62820]: value = "task-1695054" [ 1312.762561] env[62820]: _type = "Task" [ 1312.762561] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.784904] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695052, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.789834] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695053, 'name': CreateVM_Task, 'duration_secs': 0.400344} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.790208] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695054, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.793681] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1312.793681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.793681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.793681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1312.793681] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62fdb2c0-632f-461c-9f86-61a00d3767a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.798761] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1312.798761] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52262beb-c574-7f53-2982-3841c929d093" [ 1312.798761] env[62820]: _type = "Task" [ 1312.798761] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.813655] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52262beb-c574-7f53-2982-3841c929d093, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.923777] env[62820]: DEBUG oslo_concurrency.lockutils [None req-acbe4c9e-c984-4e9d-92b7-03b7cc928207 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.818s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.047800] env[62820]: DEBUG nova.network.neutron [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Updated VIF entry in instance network info cache for port cc33796e-572d-47c4-99e7-77f5cff4a281. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.048164] env[62820]: DEBUG nova.network.neutron [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Updating instance_info_cache with network_info: [{"id": "cc33796e-572d-47c4-99e7-77f5cff4a281", "address": "fa:16:3e:0e:97:82", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc33796e-57", "ovs_interfaceid": "cc33796e-572d-47c4-99e7-77f5cff4a281", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.140621] env[62820]: DEBUG oslo_concurrency.lockutils [None req-796267a1-01c8-4289-816b-1075ef29eb76 tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.507s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.144287] env[62820]: DEBUG nova.network.neutron [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1313.146790] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715f5ec6-ca1d-40b4-97d4-d1ecba8426b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.156218] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f12c84-d569-4022-b297-cb790a8e91a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.197637] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c789b73-3342-4f3e-83ab-f5b3ead2aafd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.206996] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ded796-747f-4fb2-8996-d9e0002a69cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.226124] env[62820]: DEBUG nova.compute.provider_tree [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.240139] env[62820]: DEBUG nova.compute.manager [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Received event network-vif-plugged-c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1313.240139] env[62820]: DEBUG oslo_concurrency.lockutils [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] Acquiring lock "043e14a3-df5a-4098-b147-c6460bb85423-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.240358] env[62820]: DEBUG oslo_concurrency.lockutils [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] Lock "043e14a3-df5a-4098-b147-c6460bb85423-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.240358] env[62820]: DEBUG oslo_concurrency.lockutils [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] Lock "043e14a3-df5a-4098-b147-c6460bb85423-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.241307] env[62820]: DEBUG nova.compute.manager [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] No waiting events found dispatching network-vif-plugged-c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1313.241307] env[62820]: WARNING 
nova.compute.manager [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Received unexpected event network-vif-plugged-c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 for instance with vm_state building and task_state spawning. [ 1313.241817] env[62820]: DEBUG nova.compute.manager [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Received event network-changed-c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1313.241817] env[62820]: DEBUG nova.compute.manager [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Refreshing instance network info cache due to event network-changed-c4c9e7df-633c-43a0-bfc5-21a7a40c4a55. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1313.241817] env[62820]: DEBUG oslo_concurrency.lockutils [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] Acquiring lock "refresh_cache-043e14a3-df5a-4098-b147-c6460bb85423" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.241913] env[62820]: DEBUG oslo_concurrency.lockutils [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] Acquired lock "refresh_cache-043e14a3-df5a-4098-b147-c6460bb85423" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.242365] env[62820]: DEBUG nova.network.neutron [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Refreshing network info cache for port c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.243746] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Successfully created port: 9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1313.274244] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.794422} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.280069] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b3d1f811-1d28-40f7-8bf8-c29eb64896c0/b3d1f811-1d28-40f7-8bf8-c29eb64896c0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1313.280352] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1313.281030] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ab6e284-fcc2-41a2-99c7-def3c41708bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.299429] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695054, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.303701] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1313.303701] env[62820]: value = "task-1695055" [ 1313.303701] env[62820]: _type = "Task" [ 1313.303701] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.317415] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695055, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.321613] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52262beb-c574-7f53-2982-3841c929d093, 'name': SearchDatastore_Task, 'duration_secs': 0.057727} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.321903] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.322154] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1313.322391] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.322524] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.322711] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1313.323087] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-341a3d4c-28bc-4fa3-a8e9-9ba8f3b4196f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.334127] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1313.334336] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1313.335169] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ad2af6a-5904-424d-bc8d-e0ca5c94a947 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.342798] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1313.342798] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52485a70-cf5a-ce67-f668-b232dab4d3d8" [ 1313.342798] env[62820]: _type = "Task" [ 1313.342798] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.353431] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52485a70-cf5a-ce67-f668-b232dab4d3d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.428143] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1313.553750] env[62820]: DEBUG oslo_concurrency.lockutils [req-2f508ddf-7956-493e-bfef-3923332a491b req-ff578ed2-75af-454d-8105-d8cd5f3da319 service nova] Releasing lock "refresh_cache-b3d1f811-1d28-40f7-8bf8-c29eb64896c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.606354] env[62820]: DEBUG nova.network.neutron [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updating instance_info_cache with network_info: [{"id": "0e52122a-94ee-4e33-92b4-777d631cef4b", "address": "fa:16:3e:5d:f4:98", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e52122a-94", "ovs_interfaceid": "0e52122a-94ee-4e33-92b4-777d631cef4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.646439] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1313.728692] env[62820]: DEBUG nova.scheduler.client.report [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1313.778910] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1313.787540] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695054, 'name': ReconfigVM_Task, 'duration_secs': 0.765219} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.787945] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Reconfigured VM instance instance-00000004 to attach disk [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657/aacc6f1c-56d6-43b9-9c40-5ea49b40a657.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1313.788331] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b93926a-a114-4da8-9f45-17c9668c0c7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.796399] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1313.796399] env[62820]: value = "task-1695056" [ 1313.796399] env[62820]: _type = "Task" [ 1313.796399] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.806704] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695056, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.818675] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1313.818965] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1313.819155] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.819344] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1313.820113] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.820301] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1313.820525] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1313.820690] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1313.820862] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 
tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1313.821101] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1313.821326] env[62820]: DEBUG nova.virt.hardware [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1313.825434] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e3f3a7-3664-44ab-9493-21c220a13c47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.829055] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695055, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074936} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.829287] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1313.830451] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e12424-2f7f-4f6c-889a-1ad77f4afe15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.836984] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6228c9cb-4049-4ffc-86e8-c99a78ab783c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.859059] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] b3d1f811-1d28-40f7-8bf8-c29eb64896c0/b3d1f811-1d28-40f7-8bf8-c29eb64896c0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.862665] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f3485a5-930d-4027-aa61-d8c5eb582076 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.897733] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': 
session[5263da33-e147-45e9-71e6-fd449b37f057]52485a70-cf5a-ce67-f668-b232dab4d3d8, 'name': SearchDatastore_Task, 'duration_secs': 0.012163} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.900505] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1313.900505] env[62820]: value = "task-1695057" [ 1313.900505] env[62820]: _type = "Task" [ 1313.900505] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.901881] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1139582a-779e-4eb6-a793-b96ee28bceb5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.916928] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695057, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.918741] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1313.918741] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52509970-c3d2-1373-fa2b-611835dae805" [ 1313.918741] env[62820]: _type = "Task" [ 1313.918741] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.940463] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52509970-c3d2-1373-fa2b-611835dae805, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.964027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.031730] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "15e95a20-2729-46c6-a613-32aa353ed329" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.031997] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "15e95a20-2729-46c6-a613-32aa353ed329" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.109918] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Releasing lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.110471] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Instance network_info: |[{"id": "0e52122a-94ee-4e33-92b4-777d631cef4b", "address": "fa:16:3e:5d:f4:98", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e52122a-94", "ovs_interfaceid": "0e52122a-94ee-4e33-92b4-777d631cef4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1314.111075] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-932ea96d-ad0c-4054-8281-53630441d11e 
tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:f4:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e52122a-94ee-4e33-92b4-777d631cef4b', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1314.119509] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Creating folder: Project (f59ab047666940c6bcb633a221194395). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.120138] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5662d086-535d-48a2-b9d8-4bd375118c1f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.136295] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Created folder: Project (f59ab047666940c6bcb633a221194395) in parent group-v353379. [ 1314.136502] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Creating folder: Instances. Parent ref: group-v353407. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.136750] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d8ec039-f1df-4017-846a-c2d96e1d3f99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.170864] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.237370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.237858] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1314.245339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.744s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.247332] env[62820]: DEBUG nova.network.neutron [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Successfully updated port: 05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.308045] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695056, 'name': Rename_Task, 'duration_secs': 0.267872} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.309493] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.310375] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Created folder: Instances in parent group-v353407. [ 1314.310375] env[62820]: DEBUG oslo.service.loopingcall [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1314.310375] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0c62689-fadc-4f99-9457-e47c068ffce7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.312042] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1314.312282] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e68d7700-de0b-477b-8c80-e0a074cce845 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.335987] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1314.335987] env[62820]: value = "task-1695061" [ 1314.335987] env[62820]: _type = "Task" [ 1314.335987] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.335987] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1314.335987] env[62820]: value = "task-1695060" [ 1314.335987] env[62820]: _type = "Task" [ 1314.335987] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.353024] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695060, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.353312] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695061, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.371654] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Successfully created port: 338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1314.427606] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695057, 'name': ReconfigVM_Task, 'duration_secs': 0.421919} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.433700] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Reconfigured VM instance instance-00000007 to attach disk [datastore1] b3d1f811-1d28-40f7-8bf8-c29eb64896c0/b3d1f811-1d28-40f7-8bf8-c29eb64896c0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1314.433949] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de62715b-7084-4ae8-9be8-ade961fa7e7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.444706] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52509970-c3d2-1373-fa2b-611835dae805, 'name': SearchDatastore_Task, 'duration_secs': 0.016468} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.446611] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.448114] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 043e14a3-df5a-4098-b147-c6460bb85423/043e14a3-df5a-4098-b147-c6460bb85423.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1314.448114] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1314.448114] env[62820]: value = "task-1695062" [ 1314.448114] env[62820]: _type = "Task" [ 1314.448114] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.448114] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e4202bc-05df-478c-a515-53fd796f085f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.460616] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695062, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.463524] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1314.463524] env[62820]: value = "task-1695063" [ 1314.463524] env[62820]: _type = "Task" [ 1314.463524] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.472456] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.472948] env[62820]: DEBUG nova.network.neutron [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Updated VIF entry in instance network info cache for port c4c9e7df-633c-43a0-bfc5-21a7a40c4a55. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1314.474031] env[62820]: DEBUG nova.network.neutron [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Updating instance_info_cache with network_info: [{"id": "c4c9e7df-633c-43a0-bfc5-21a7a40c4a55", "address": "fa:16:3e:b3:e6:6c", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4c9e7df-63", "ovs_interfaceid": "c4c9e7df-633c-43a0-bfc5-21a7a40c4a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.747360] env[62820]: DEBUG nova.compute.utils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1314.748852] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1314.751431] env[62820]: DEBUG nova.network.neutron [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1314.758880] env[62820]: INFO nova.compute.claims [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1314.763248] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "refresh_cache-7c5d1740-92ba-4d4b-a557-10f8ea58e883" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.763507] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquired lock "refresh_cache-7c5d1740-92ba-4d4b-a557-10f8ea58e883" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.763745] env[62820]: DEBUG nova.network.neutron [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.851458] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695060, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.858444] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695061, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.951259] env[62820]: DEBUG nova.policy [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a09fea1459b84da2b816c77b02b8558a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b34ef1834bd54b64b7448e5a85ef978e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1314.970055] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695062, 'name': Rename_Task, 'duration_secs': 0.240052} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.971688] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.971688] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6a355bb-65ae-4989-9c1a-a4af4b104eb8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.976877] env[62820]: DEBUG oslo_concurrency.lockutils [req-524a1f9e-a006-414e-bafd-bedf658edf79 req-61d116cc-7d33-4db7-a425-d7809e4bb76a service nova] Releasing lock "refresh_cache-043e14a3-df5a-4098-b147-c6460bb85423" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.978701] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695063, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.979997] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1314.979997] env[62820]: value = "task-1695064" [ 1314.979997] env[62820]: _type = "Task" [ 1314.979997] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.992323] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695064, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.197531] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "6176f083-b61a-40d6-90a0-680b628a1e08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.197775] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.270564] env[62820]: INFO nova.compute.resource_tracker [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updating resource usage from migration 17065b4d-ea93-42e5-aca0-e553248f0e35 [ 1315.274999] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1315.312532] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Successfully created port: 7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1315.346741] env[62820]: DEBUG nova.network.neutron [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.361308] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695060, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.361308] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695061, 'name': CreateVM_Task, 'duration_secs': 0.664328} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.361546] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1315.362112] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.362265] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.362675] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1315.362840] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c10d44e-d1d3-4a08-8f91-0e54d430b21b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.372436] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1315.372436] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527b1086-c039-fefc-6e94-c25c2f8278f6" [ 1315.372436] env[62820]: _type = "Task" [ 1315.372436] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.381978] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527b1086-c039-fefc-6e94-c25c2f8278f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.482232] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585582} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.485630] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 043e14a3-df5a-4098-b147-c6460bb85423/043e14a3-df5a-4098-b147-c6460bb85423.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1315.485908] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1315.486213] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-701ddee8-8f38-4727-bb49-91954c8173f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.496519] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695064, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.498166] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1315.498166] env[62820]: value = "task-1695065" [ 1315.498166] env[62820]: _type = "Task" [ 1315.498166] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.518091] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695065, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.755428] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa4009e-ea04-4351-8132-f65e005f4c89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.767734] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c81e50-f14b-4420-94c6-57bd56b208a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.810077] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d64fcd-b708-4f04-a189-25d266f47865 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.818895] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4628a4f0-38ff-4872-aedf-2d73aacb53f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.834558] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1315.849460] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695060, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.884103] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527b1086-c039-fefc-6e94-c25c2f8278f6, 'name': SearchDatastore_Task, 'duration_secs': 0.03065} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.884459] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.884855] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1315.885145] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.885145] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.885248] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1315.886439] env[62820]: DEBUG nova.network.neutron [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Updating instance_info_cache with network_info: [{"id": "05fc8ccf-bb2b-4348-898d-795b93e333a7", "address": "fa:16:3e:bd:a9:1f", "network": {"id": "e4ebe926-de4b-4177-872d-d27f37fa2cff", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-458592263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d853b4ff56534a10a13bf7e5becf7d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05fc8ccf-bb", 
"ovs_interfaceid": "05fc8ccf-bb2b-4348-898d-795b93e333a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.887371] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d09d71b-fb8c-41b6-8d88-19ddeddad8c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.904368] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1315.904551] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1315.905413] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c5d33f5-387a-46ad-9a78-54828215de56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.912773] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1315.912773] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5230bd13-b8b1-3cc4-72ec-7d31ed7321f5" [ 1315.912773] env[62820]: _type = "Task" [ 1315.912773] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.921485] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5230bd13-b8b1-3cc4-72ec-7d31ed7321f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.994783] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695064, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.004381] env[62820]: DEBUG nova.network.neutron [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Successfully created port: f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1316.010308] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097845} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.011212] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1316.012576] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201d600a-2e1f-452f-8033-8861d5059c2d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.035218] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 043e14a3-df5a-4098-b147-c6460bb85423/043e14a3-df5a-4098-b147-c6460bb85423.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1316.035899] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cc459b7-7aef-4a79-bd74-a1c512a4aa88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.058058] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1316.058058] env[62820]: value = "task-1695066" [ 1316.058058] env[62820]: _type = "Task" [ 1316.058058] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.066729] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695066, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.068485] env[62820]: DEBUG nova.compute.manager [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Received event network-vif-plugged-0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1316.068485] env[62820]: DEBUG oslo_concurrency.lockutils [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] Acquiring lock "2f917745-28ef-4dfe-8c09-45c15a80145d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.068697] env[62820]: DEBUG oslo_concurrency.lockutils [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.068862] env[62820]: DEBUG oslo_concurrency.lockutils [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.069048] env[62820]: DEBUG nova.compute.manager [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] No waiting events found dispatching network-vif-plugged-0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1316.069224] env[62820]: WARNING nova.compute.manager [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Received unexpected event network-vif-plugged-0e52122a-94ee-4e33-92b4-777d631cef4b for instance with vm_state building and task_state spawning. [ 1316.069383] env[62820]: DEBUG nova.compute.manager [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Received event network-changed-0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1316.069536] env[62820]: DEBUG nova.compute.manager [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Refreshing instance network info cache due to event network-changed-0e52122a-94ee-4e33-92b4-777d631cef4b. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1316.069712] env[62820]: DEBUG oslo_concurrency.lockutils [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] Acquiring lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.069845] env[62820]: DEBUG oslo_concurrency.lockutils [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] Acquired lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.069996] env[62820]: DEBUG nova.network.neutron [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Refreshing network info cache for port 0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1316.311693] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1316.360518] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1316.360666] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1316.360889] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1316.361566] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1316.361566] env[62820]: DEBUG 
nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1316.361566] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1316.361779] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1316.361779] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1316.361930] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1316.362167] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1316.362391] env[62820]: DEBUG nova.virt.hardware [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1316.363427] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc978960-5f98-4e96-80b3-ae4c260734af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.371297] env[62820]: ERROR nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [req-dd624ed3-935f-4968-8002-491fdef77c13] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd624ed3-935f-4968-8002-491fdef77c13"}]} [ 1316.372699] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695060, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.385559] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98edb4f-9527-437c-8970-14b7a2179a21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.392160] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Releasing lock "refresh_cache-7c5d1740-92ba-4d4b-a557-10f8ea58e883" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.392160] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Instance network_info: |[{"id": "05fc8ccf-bb2b-4348-898d-795b93e333a7", "address": "fa:16:3e:bd:a9:1f", "network": {"id": "e4ebe926-de4b-4177-872d-d27f37fa2cff", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-458592263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d853b4ff56534a10a13bf7e5becf7d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05fc8ccf-bb", "ovs_interfaceid": "05fc8ccf-bb2b-4348-898d-795b93e333a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1316.392316] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:a9:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c2b89fb-df8c-47c0-83ae-44291236feb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05fc8ccf-bb2b-4348-898d-795b93e333a7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1316.399344] 
env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Creating folder: Project (d853b4ff56534a10a13bf7e5becf7d0c). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1316.409071] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1316.411740] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-160a17c8-2cf6-403e-8cb8-5fa0c4e2ff6b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.424262] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5230bd13-b8b1-3cc4-72ec-7d31ed7321f5, 'name': SearchDatastore_Task, 'duration_secs': 0.048606} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.425145] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d77997e1-ee6f-40d0-958b-e7331c334257 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.428921] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Created folder: Project (d853b4ff56534a10a13bf7e5becf7d0c) in parent group-v353379. [ 1316.429141] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Creating folder: Instances. Parent ref: group-v353410. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1316.429760] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-451f28f9-d134-4bb8-8e0d-229d9cda21af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.433097] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1316.433338] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1316.436805] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1316.436805] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5203a3ff-0110-0126-e465-fd53fc46c570" [ 1316.436805] env[62820]: _type = "Task" [ 1316.436805] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.441127] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Created folder: Instances in parent group-v353410. [ 1316.441364] env[62820]: DEBUG oslo.service.loopingcall [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1316.441970] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1316.442374] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89b39463-5eb3-4fcd-b687-b1a6260deb9c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.460764] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1316.462854] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5203a3ff-0110-0126-e465-fd53fc46c570, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.472037] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1316.472037] env[62820]: value = "task-1695069" [ 1316.472037] env[62820]: _type = "Task" [ 1316.472037] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.487197] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695069, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.492143] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1316.501111] env[62820]: DEBUG oslo_vmware.api [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695064, 'name': PowerOnVM_Task, 'duration_secs': 1.42435} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.501431] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1316.501655] env[62820]: INFO nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Took 12.74 seconds to spawn the instance on the hypervisor. [ 1316.501878] env[62820]: DEBUG nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1316.503060] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd252f52-ad45-4097-b5ed-3d6250199324 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.588274] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695066, 'name': ReconfigVM_Task, 'duration_secs': 0.359569} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.591726] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 043e14a3-df5a-4098-b147-c6460bb85423/043e14a3-df5a-4098-b147-c6460bb85423.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1316.594200] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10e1c315-be09-4612-ba28-4fb37441a426 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.606783] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1316.606783] env[62820]: value = "task-1695070" [ 1316.606783] env[62820]: _type = "Task" [ 1316.606783] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.622032] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695070, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.864604] env[62820]: DEBUG oslo_vmware.api [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695060, 'name': PowerOnVM_Task, 'duration_secs': 2.499373} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.865636] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1316.865890] env[62820]: DEBUG nova.compute.manager [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1316.867264] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2579634d-5908-4839-90df-7b25b4f392d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.948906] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5203a3ff-0110-0126-e465-fd53fc46c570, 'name': SearchDatastore_Task, 'duration_secs': 0.050886} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.949528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.949795] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2f917745-28ef-4dfe-8c09-45c15a80145d/2f917745-28ef-4dfe-8c09-45c15a80145d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1316.950071] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42ca54c3-81ca-435f-905d-afdb503dea0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.958138] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa057e7-d6ba-4079-8c67-09e68b72077e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.962884] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1316.962884] env[62820]: value = "task-1695071" [ 1316.962884] env[62820]: _type = "Task" [ 1316.962884] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.969666] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab52336a-1dca-4bfe-958c-267acfc66786 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.975724] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.013583] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695069, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.024665] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdbb1f7-b12c-4280-93a3-12727e371539 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.029931] env[62820]: DEBUG nova.compute.manager [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Received event network-vif-plugged-05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1317.029931] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] Acquiring lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.029931] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.029931] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.030419] env[62820]: DEBUG nova.compute.manager [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] No waiting events found dispatching network-vif-plugged-05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1317.030419] env[62820]: WARNING nova.compute.manager [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Received unexpected event network-vif-plugged-05fc8ccf-bb2b-4348-898d-795b93e333a7 for instance with vm_state building and task_state spawning. [ 1317.030419] env[62820]: DEBUG nova.compute.manager [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Received event network-changed-05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1317.030536] env[62820]: DEBUG nova.compute.manager [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Refreshing instance network info cache due to event network-changed-05fc8ccf-bb2b-4348-898d-795b93e333a7. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1317.030630] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] Acquiring lock "refresh_cache-7c5d1740-92ba-4d4b-a557-10f8ea58e883" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.030761] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] Acquired lock "refresh_cache-7c5d1740-92ba-4d4b-a557-10f8ea58e883" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.030939] env[62820]: DEBUG nova.network.neutron [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Refreshing network info cache for port 05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1317.033472] env[62820]: INFO nova.compute.manager [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Took 24.24 seconds to build instance. [ 1317.040492] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3377839-2408-4a55-9a3b-feb4d0257498 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.057892] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.114871] env[62820]: DEBUG nova.network.neutron [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updated VIF entry in instance network info cache for port 0e52122a-94ee-4e33-92b4-777d631cef4b. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1317.115287] env[62820]: DEBUG nova.network.neutron [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updating instance_info_cache with network_info: [{"id": "0e52122a-94ee-4e33-92b4-777d631cef4b", "address": "fa:16:3e:5d:f4:98", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e52122a-94", "ovs_interfaceid": "0e52122a-94ee-4e33-92b4-777d631cef4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.121814] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695070, 'name': Rename_Task, 'duration_secs': 0.190982} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.122398] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1317.122645] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8918d5e-5a2a-466f-84d9-2276decd2501 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.130488] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1317.130488] env[62820]: value = "task-1695072" [ 1317.130488] env[62820]: _type = "Task" [ 1317.130488] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.140433] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695072, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.393300] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.436535] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.437363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.488830] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695071, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.492547] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695069, 'name': CreateVM_Task, 'duration_secs': 0.532549} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.494304] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1317.495539] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.495709] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.496085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1317.496402] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9738274-01dd-4305-985a-ddf59967ef48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.505468] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1317.505468] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ae275d-ef3f-59e5-2b0e-a6d07b1c65d8" [ 1317.505468] env[62820]: _type = "Task" [ 1317.505468] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.516515] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ae275d-ef3f-59e5-2b0e-a6d07b1c65d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.536770] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8078406d-8242-44c5-8035-ae9bc79ce2bf tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.757s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.593909] env[62820]: ERROR nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [req-3f903d0b-28e0-4dd4-848c-dc5b597a490e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3f903d0b-28e0-4dd4-848c-dc5b597a490e"}]} [ 1317.618990] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1317.623625] env[62820]: DEBUG oslo_concurrency.lockutils [req-45072cfa-c529-4e96-b2a4-246c27341dd8 req-9af95070-d786-44d0-994d-3635a8336a84 service nova] Releasing lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.646638] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695072, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.658021] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1317.658021] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.673569] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1317.711545] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1317.978121] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696252} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.981241] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2f917745-28ef-4dfe-8c09-45c15a80145d/2f917745-28ef-4dfe-8c09-45c15a80145d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1317.981520] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1317.982897] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e583b96d-4ddb-49a3-badb-3f536afc916e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.990959] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1317.990959] env[62820]: value = "task-1695073" [ 1317.990959] env[62820]: _type = "Task" [ 1317.990959] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.000936] env[62820]: DEBUG nova.network.neutron [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Updated VIF entry in instance network info cache for port 05fc8ccf-bb2b-4348-898d-795b93e333a7. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1318.005024] env[62820]: DEBUG nova.network.neutron [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Updating instance_info_cache with network_info: [{"id": "05fc8ccf-bb2b-4348-898d-795b93e333a7", "address": "fa:16:3e:bd:a9:1f", "network": {"id": "e4ebe926-de4b-4177-872d-d27f37fa2cff", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-458592263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d853b4ff56534a10a13bf7e5becf7d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05fc8ccf-bb", "ovs_interfaceid": "05fc8ccf-bb2b-4348-898d-795b93e333a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.009505] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.029111] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ae275d-ef3f-59e5-2b0e-a6d07b1c65d8, 'name': SearchDatastore_Task, 'duration_secs': 0.054162} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.032488] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.032488] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1318.032488] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1318.032488] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.033023] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.033023] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Successfully updated port: 9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.033588] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f4c686a-56e0-4ee5-abb5-634c1403a049 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.040748] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1318.057431] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.057711] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1318.062151] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c1f39fd-3eee-44e7-9fbf-7be0d1d57c24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.071690] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1318.071690] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fd3b1d-c333-fe7a-f65e-1ed8a452202d" [ 1318.071690] env[62820]: _type = "Task" [ 1318.071690] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.084360] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fd3b1d-c333-fe7a-f65e-1ed8a452202d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.112011] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9515056a-6c23-424e-9d9d-862d3fa4bbac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.122799] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60c3b5e-85d0-4908-9912-2bf6aa3491ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.159141] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f615d8-6552-4096-b9c8-2086e85f71c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.170235] env[62820]: DEBUG oslo_vmware.api [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695072, 'name': PowerOnVM_Task, 'duration_secs': 0.673699} completed successfully. 
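
The repeated "Waiting for the task: (returnval){ value = "task-…" _type = "Task" } to complete", "_poll_task … progress is N%" and "completed successfully … duration_secs" entries above come from oslo.vmware's wait_for_task loop, which polls the vCenter task object until it reaches a terminal state. The following is only a minimal illustrative analogue of that pattern, not the oslo.vmware implementation; the FakeTask class, poll interval and timeout are assumptions.

```python
import time
from dataclasses import dataclass


@dataclass
class FakeTask:
    """Stand-in for a vCenter task handle such as 'task-1695073'."""
    task_id: str
    _progress: int = 0

    def poll(self) -> dict:
        # Pretend the backend advances the task on every poll.
        self._progress = min(100, self._progress + 40)
        state = "success" if self._progress >= 100 else "running"
        return {"id": self.task_id, "progress": self._progress, "state": state}


def wait_for_task(task: FakeTask, interval: float = 0.5, timeout: float = 60.0) -> dict:
    """Poll a task until it reaches a terminal state, logging progress as it goes."""
    deadline = time.monotonic() + timeout
    while True:
        info = task.poll()
        print(f"Task: {info['id']} progress is {info['progress']}%")
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['id']} failed")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {info['id']} did not finish within {timeout}s")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-1695072"), interval=0.1)
```
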
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.171455] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fecba5c-576e-468d-840b-aff990608d16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.176481] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.176807] env[62820]: INFO nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Took 11.95 seconds to spawn the instance on the hypervisor. [ 1318.177332] env[62820]: DEBUG nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1318.178175] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5080625-3608-49bd-ac50-e635350d404b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.197593] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1318.266864] env[62820]: DEBUG nova.network.neutron [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Successfully updated port: f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.507063] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.151473} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.507063] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1318.507940] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e678b168-ffcd-47b4-ac91-cab25bc3a441 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.510840] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb1eb8cf-86c5-4b53-af03-feb8e6116cb3 req-844c55fe-3b27-4cb0-88e8-9cc2dd708973 service nova] Releasing lock "refresh_cache-7c5d1740-92ba-4d4b-a557-10f8ea58e883" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.534384] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 2f917745-28ef-4dfe-8c09-45c15a80145d/2f917745-28ef-4dfe-8c09-45c15a80145d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1318.534384] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b10cbb50-d03b-4834-ad59-7f81ccdaeb58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.572021] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1318.572021] env[62820]: value = "task-1695074" [ 1318.572021] env[62820]: _type = "Task" [ 1318.572021] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.584631] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fd3b1d-c333-fe7a-f65e-1ed8a452202d, 'name': SearchDatastore_Task, 'duration_secs': 0.036411} completed successfully. 
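
The SearchDatastore_Task calls and the Acquiring/Acquired/Releasing lock pairs around "[datastore1] devstack-image-cache_base/…vmdk" show the image-cache pattern: the driver serializes on the cached image path, fetches the base VMDK into the cache only if it is missing, and then copies the per-instance disk from the cache (the CopyVirtualDisk_Task entries). A simplified sketch of that flow using local files and per-key threading locks; the helper names and on-disk layout are illustrative and do not reproduce Nova's ds_util/vmops API.

```python
import shutil
import threading
from collections import defaultdict
from pathlib import Path

# One lock per cached image path, mimicking the named locks seen in the log.
_image_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)


def fetch_image_if_missing(cache_dir: Path, image_id: str, download) -> Path:
    """Return the cached base image path, downloading it under a per-image lock."""
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    with _image_locks[str(cached)]:          # "Acquiring lock" / "Acquired lock"
        if not cached.exists():              # the SearchDatastore step found nothing
            cached.parent.mkdir(parents=True, exist_ok=True)
            download(cached)                 # fetched from the image service in the real flow
    return cached                            # "Releasing lock"


def prepare_instance_disk(cached: Path, instance_dir: Path, instance_uuid: str) -> Path:
    """Copy the cached base image to the instance directory (CopyVirtualDisk_Task)."""
    instance_dir.mkdir(parents=True, exist_ok=True)
    dest = instance_dir / f"{instance_uuid}.vmdk"
    shutil.copyfile(cached, dest)
    return dest
```
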
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.588778] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.589547] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695074, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.589907] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a627ede7-2dde-49f5-8433-23adb9219f54 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.597330] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1318.597330] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5213efae-d820-5db9-beee-e716b45aa383" [ 1318.597330] env[62820]: _type = "Task" [ 1318.597330] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.607027] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5213efae-d820-5db9-beee-e716b45aa383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.713256] env[62820]: INFO nova.compute.manager [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Took 25.14 seconds to build instance. [ 1318.736835] env[62820]: ERROR nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [req-62d42817-3b26-45e6-877a-8d8a6418e2b5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-62d42817-3b26-45e6-877a-8d8a6418e2b5"}]} [ 1318.760341] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1318.771749] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "refresh_cache-9910a0ea-5ce0-41e9-b449-da729a4c3223" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1318.771945] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired lock "refresh_cache-9910a0ea-5ce0-41e9-b449-da729a4c3223" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.772148] env[62820]: DEBUG nova.network.neutron [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1318.778367] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1318.778580] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1318.797823] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing aggregate associations for resource 
provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: 530e377c-b2a9-43fb-acb3-df968bdeaf04 {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1318.830109] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1318.932069] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.932480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.092216] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.108021] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5213efae-d820-5db9-beee-e716b45aa383, 'name': SearchDatastore_Task, 'duration_secs': 0.016529} completed successfully. 
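
The ERROR "Failed to update inventory … Got 409 … placement.concurrent_update / resource provider generation conflict" followed immediately by "Refreshing inventories/aggregates/traits for resource provider …" is placement's optimistic-concurrency protocol in action: inventory writes carry the provider generation, and when another writer has bumped it first the client re-reads the provider state and retries instead of failing the claim. Below is a hedged sketch of that retry loop against a hypothetical client object; the method names (get_provider_generation, put_inventory, refresh_provider) are assumptions, not the actual report-client API.

```python
import time


class GenerationConflict(Exception):
    """Raised when placement answers 409 placement.concurrent_update."""


def update_inventory_with_retry(client, provider_uuid, inventory, attempts=4):
    """PUT inventory with the current generation; refresh and retry on conflict."""
    for attempt in range(1, attempts + 1):
        generation = client.get_provider_generation(provider_uuid)
        try:
            # The write includes the caller's view of the generation so the
            # server can detect concurrent updates to the same provider.
            client.put_inventory(provider_uuid, inventory, generation=generation)
            return True
        except GenerationConflict:
            # Another thread (e.g. a resize claim) won the race: refresh the
            # cached provider data and try again with the new generation.
            client.refresh_provider(provider_uuid)
            time.sleep(0.1 * attempt)
    return False
```
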
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.109026] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1319.109118] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7c5d1740-92ba-4d4b-a557-10f8ea58e883/7c5d1740-92ba-4d4b-a557-10f8ea58e883.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1319.109345] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-588fae4f-dda4-44f2-9165-39800c143dba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.119110] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1319.119110] env[62820]: value = "task-1695075" [ 1319.119110] env[62820]: _type = "Task" [ 1319.119110] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.128420] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695075, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.214287] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4b83ff48-be0f-4fd7-9aa2-6e80b8331f8a tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "043e14a3-df5a-4098-b147-c6460bb85423" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.657s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.289091] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d191601c-7547-4b45-9172-386972a25d9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.300784] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bff5b5-9ad8-42f9-a35d-d7e976a6fa36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.338946] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd2fe57-c7bc-442e-bdf8-9727321e40a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.349020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e101af18-56e9-4f0a-bedc-2b591771483e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.366023] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1319.383751] env[62820]: DEBUG nova.network.neutron [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.594231] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695074, 'name': ReconfigVM_Task, 'duration_secs': 0.627719} completed successfully. 
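
The inventory dict the resource tracker keeps reporting ({'VCPU': {'total': 48, …, 'allocation_ratio': 4.0}, …}) is what placement schedules against: in effect the usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small worked example over the values from this log (the print formatting is only for illustration):

```python
def effective_capacity(resource: dict) -> float:
    """Schedulable capacity for one resource class: (total - reserved) * ratio."""
    return (resource["total"] - resource["reserved"]) * resource["allocation_ratio"]


inventory = {
    "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 181, "allocation_ratio": 1.0},
}

for name, res in inventory.items():
    print(f"{name}: capacity={effective_capacity(res):.0f}, "
          f"largest single allocation={res['max_unit']}")
# VCPU: 192 schedulable units (48 * 4.0), at most 16 per instance;
# MEMORY_MB: 196078; DISK_GB: 400 with at most 181 GB per allocation.
```
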
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.595537] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 2f917745-28ef-4dfe-8c09-45c15a80145d/2f917745-28ef-4dfe-8c09-45c15a80145d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1319.596463] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5676436-31f1-44af-9295-3ecb51939a5b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.605730] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "519c961c-557e-4796-88da-047c55d6be44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.605730] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "519c961c-557e-4796-88da-047c55d6be44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.612971] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1319.612971] env[62820]: value = "task-1695076" [ 1319.612971] env[62820]: _type = "Task" [ 1319.612971] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.633786] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695076, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.642204] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695075, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.729033] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Starting instance... 
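
Each 'Acquiring lock "<instance-uuid>" by "…_locked_do_build_and_run_instance"' / 'Lock "<instance-uuid>" acquired … waited 0.000s' pair shows that builds are serialized per instance UUID (via oslo.concurrency lockutils, whose inner wrapper appears in the log), so concurrent requests for the same instance cannot race while different instances proceed in parallel. A minimal threading-based analogue of that synchronize-by-name decorator; the decorator and function names here are illustrative.

```python
import threading
import time
from collections import defaultdict
from functools import wraps

_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)


def synchronized_by(key_fn):
    """Serialize calls that resolve to the same lock name (e.g. an instance UUID)."""
    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            name = key_fn(*args, **kwargs)
            start = time.monotonic()
            with _locks[name]:
                waited = time.monotonic() - start
                print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
                return fn(*args, **kwargs)
        return wrapper
    return decorator


@synchronized_by(lambda instance_uuid: instance_uuid)
def locked_do_build_and_run_instance(instance_uuid: str) -> None:
    time.sleep(0.05)  # stand-in for the actual build-and-run work
```
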
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1319.852662] env[62820]: DEBUG nova.compute.manager [req-05885259-255e-4017-9e5d-604cc8392412 req-902c5594-b309-4b01-a938-034d09e62b4a service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-vif-plugged-9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1319.852662] env[62820]: DEBUG oslo_concurrency.lockutils [req-05885259-255e-4017-9e5d-604cc8392412 req-902c5594-b309-4b01-a938-034d09e62b4a service nova] Acquiring lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.852662] env[62820]: DEBUG oslo_concurrency.lockutils [req-05885259-255e-4017-9e5d-604cc8392412 req-902c5594-b309-4b01-a938-034d09e62b4a service nova] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.852662] env[62820]: DEBUG oslo_concurrency.lockutils [req-05885259-255e-4017-9e5d-604cc8392412 req-902c5594-b309-4b01-a938-034d09e62b4a service nova] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.852662] env[62820]: DEBUG nova.compute.manager [req-05885259-255e-4017-9e5d-604cc8392412 req-902c5594-b309-4b01-a938-034d09e62b4a service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] No waiting events found dispatching network-vif-plugged-9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1319.853378] env[62820]: WARNING nova.compute.manager [req-05885259-255e-4017-9e5d-604cc8392412 req-902c5594-b309-4b01-a938-034d09e62b4a service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received unexpected event network-vif-plugged-9002583e-7d52-45aa-bb62-0eef82acb545 for instance with vm_state building and task_state spawning. [ 1319.904689] env[62820]: ERROR nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [req-78c20d76-69d1-45f6-9c50-350cdc37a452] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78c20d76-69d1-45f6-9c50-350cdc37a452"}]} [ 1319.905803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 5.663s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.909939] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.451s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.911620] env[62820]: INFO nova.compute.claims [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.941936] env[62820]: INFO nova.compute.manager [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Swapping old allocation on dict_keys(['8a0693d4-1456-4a04-ae15-b1eaea0edd7a']) held by migration 17065b4d-ea93-42e5-aca0-e553248f0e35 for instance [ 1319.947729] env[62820]: DEBUG nova.network.neutron [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Updating instance_info_cache with network_info: [{"id": "f8810d7c-99fa-4aca-b414-846eebdcd345", "address": "fa:16:3e:fb:be:01", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8810d7c-99", "ovs_interfaceid": "f8810d7c-99fa-4aca-b414-846eebdcd345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.987700] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 
tempest-MigrationsAdminTest-2107799577-project-member] Overwriting current allocation {'allocations': {'8a0693d4-1456-4a04-ae15-b1eaea0edd7a': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 32}}, 'project_id': '04698d19505d400594ce250863e15456', 'user_id': 'b2a98cf26a4949abadead50c7354a638', 'consumer_generation': 1} on consumer 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 {{(pid=62820) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1320.054307] env[62820]: DEBUG nova.compute.manager [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Rescheduling, attempt 1 {{(pid=62820) _reschedule_resize_or_reraise /opt/stack/nova/nova/compute/manager.py:6132}} [ 1320.088845] env[62820]: DEBUG nova.compute.manager [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-changed-9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1320.089118] env[62820]: DEBUG nova.compute.manager [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Refreshing instance network info cache due to event network-changed-9002583e-7d52-45aa-bb62-0eef82acb545. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1320.089361] env[62820]: DEBUG oslo_concurrency.lockutils [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] Acquiring lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.089511] env[62820]: DEBUG oslo_concurrency.lockutils [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] Acquired lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.089676] env[62820]: DEBUG nova.network.neutron [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Refreshing network info cache for port 9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1320.109328] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "58a26c98-cbf9-491f-8d2c-20281c3d7771" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.109657] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1320.129499] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695076, 'name': Rename_Task, 'duration_secs': 0.213491} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.130637] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1320.131473] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5884141-c539-4fce-8ccb-7ddf88147c03 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.136848] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56858} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.137545] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7c5d1740-92ba-4d4b-a557-10f8ea58e883/7c5d1740-92ba-4d4b-a557-10f8ea58e883.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1320.137905] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1320.138104] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5abdf8e4-e69a-48e6-8d9a-1e92a0987ef7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.143261] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1320.143261] env[62820]: value = "task-1695077" [ 1320.143261] env[62820]: _type = "Task" [ 1320.143261] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.148253] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1320.148253] env[62820]: value = "task-1695078" [ 1320.148253] env[62820]: _type = "Task" [ 1320.148253] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.157134] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.164252] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695078, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.262329] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.451175] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Releasing lock "refresh_cache-9910a0ea-5ce0-41e9-b449-da729a4c3223" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1320.451817] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Instance network_info: |[{"id": "f8810d7c-99fa-4aca-b414-846eebdcd345", "address": "fa:16:3e:fb:be:01", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8810d7c-99", "ovs_interfaceid": "f8810d7c-99fa-4aca-b414-846eebdcd345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1320.453024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:be:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8810d7c-99fa-4aca-b414-846eebdcd345', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1320.464291] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating folder: Project (b34ef1834bd54b64b7448e5a85ef978e). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1320.464919] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8d76bf0-3098-404a-a1d1-046b94b904be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.491196] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Created folder: Project (b34ef1834bd54b64b7448e5a85ef978e) in parent group-v353379. [ 1320.491800] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating folder: Instances. Parent ref: group-v353413. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1320.491977] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e84a1b4e-de7d-4663-b344-0cfdee5a06e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.505524] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Created folder: Instances in parent group-v353413. [ 1320.506061] env[62820]: DEBUG oslo.service.loopingcall [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1320.506061] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1320.506301] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a04654dd-2be6-4a12-995e-90f2a7aa5c52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.529927] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1320.529927] env[62820]: value = "task-1695081" [ 1320.529927] env[62820]: _type = "Task" [ 1320.529927] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.539905] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695081, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.566065] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.661844] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.199977} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.665827] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1320.666230] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695077, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.666971] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197e4c64-5c73-47c5-9950-4e5434ded939 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.695859] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 7c5d1740-92ba-4d4b-a557-10f8ea58e883/7c5d1740-92ba-4d4b-a557-10f8ea58e883.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1320.697179] env[62820]: DEBUG nova.network.neutron [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1320.699149] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86442d97-1de5-48a0-b09d-9fcda1973716 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.721895] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1320.721895] env[62820]: value = "task-1695082" [ 1320.721895] env[62820]: _type = "Task" [ 1320.721895] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.738918] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695082, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.776512] env[62820]: DEBUG nova.compute.manager [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Received event network-vif-plugged-f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1320.776512] env[62820]: DEBUG oslo_concurrency.lockutils [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] Acquiring lock "9910a0ea-5ce0-41e9-b449-da729a4c3223-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.776512] env[62820]: DEBUG oslo_concurrency.lockutils [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.776776] env[62820]: DEBUG oslo_concurrency.lockutils [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.776776] env[62820]: DEBUG nova.compute.manager [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] No waiting events found dispatching network-vif-plugged-f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1320.776958] env[62820]: WARNING nova.compute.manager [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Received unexpected event network-vif-plugged-f8810d7c-99fa-4aca-b414-846eebdcd345 for instance with vm_state building and task_state spawning. [ 1320.779933] env[62820]: DEBUG nova.compute.manager [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Received event network-changed-f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1320.779933] env[62820]: DEBUG nova.compute.manager [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Refreshing instance network info cache due to event network-changed-f8810d7c-99fa-4aca-b414-846eebdcd345. 
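
The network-vif-plugged / network-changed traffic above shows the external-event handshake: the spawn path can register that it will wait for "network-vif-plugged-<port>", and when Neutron notifies Nova the handler pops the matching waiter under the "<uuid>-events" lock; if nothing is registered yet (the port became active before the driver started waiting), the event is logged as unexpected and dropped, which is exactly the WARNING lines seen here. A small model of that registry; the class and method names are illustrative rather than the compute manager's actual implementation.

```python
import threading


class InstanceEvents:
    """Match externally delivered events to waiters registered per instance."""

    def __init__(self) -> None:
        self._lock = threading.Lock()          # plays the role of the "<uuid>-events" lock
        self._waiters: dict[tuple[str, str], threading.Event] = {}

    def prepare_for(self, instance_uuid: str, event_name: str) -> threading.Event:
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid: str, event_name: str) -> None:
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # No waiting events found dispatching -> the "unexpected event" warning.
            print(f"Received unexpected event {event_name} for {instance_uuid}")
        else:
            waiter.set()


events = InstanceEvents()
# Event arrives before the driver registered interest -> logged as unexpected.
events.pop_event("b7c52283-eada-47fd-887f-a5ad94a0583a",
                 "network-vif-plugged-9002583e-7d52-45aa-bb62-0eef82acb545")
```
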
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1320.779933] env[62820]: DEBUG oslo_concurrency.lockutils [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] Acquiring lock "refresh_cache-9910a0ea-5ce0-41e9-b449-da729a4c3223" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.779933] env[62820]: DEBUG oslo_concurrency.lockutils [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] Acquired lock "refresh_cache-9910a0ea-5ce0-41e9-b449-da729a4c3223" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.779933] env[62820]: DEBUG nova.network.neutron [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Refreshing network info cache for port f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1320.953526] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1320.970093] env[62820]: DEBUG nova.network.neutron [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.978941] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1320.979177] env[62820]: DEBUG nova.compute.provider_tree [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1320.995240] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 
tempest-ListImageFiltersTestJSON-1225692901-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: 530e377c-b2a9-43fb-acb3-df968bdeaf04 {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1321.017425] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1321.042802] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695081, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.152641] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Successfully updated port: 338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1321.158564] env[62820]: DEBUG oslo_vmware.api [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695077, 'name': PowerOnVM_Task, 'duration_secs': 0.73085} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.159763] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1321.159763] env[62820]: INFO nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Took 12.43 seconds to spawn the instance on the hypervisor. 
[ 1321.159763] env[62820]: DEBUG nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1321.160448] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d163c6-7715-4482-8ac6-03fefecaca4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.178514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.178747] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.237458] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.474421] env[62820]: DEBUG oslo_concurrency.lockutils [req-a2cfc589-cf04-46a9-9bfb-de2122894621 req-5e663155-25fc-4c45-87af-c86eef3921c5 service nova] Releasing lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.498753] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e5771b-f49b-4af6-93b7-b703f4a65fcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.515237] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82ee241-5e3d-4530-8185-309c3a9d2d93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.568239] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4682b4e2-be6b-4a34-831e-35622dc8096c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.584382] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695081, 'name': CreateVM_Task, 'duration_secs': 0.562317} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.586598] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b2fc4c-d415-4728-a1ff-5776ae633ba8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.590538] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1321.591351] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.592036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.592036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1321.592915] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0dcb4e5-0f65-4972-9c4d-89f739fc6932 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.605376] env[62820]: DEBUG nova.compute.provider_tree [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1321.609061] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1321.609061] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e5fd35-3ade-7970-13fd-90ddfc1e369d" [ 1321.609061] env[62820]: _type = "Task" [ 1321.609061] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.625229] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e5fd35-3ade-7970-13fd-90ddfc1e369d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.693185] env[62820]: INFO nova.compute.manager [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Took 27.75 seconds to build instance. [ 1321.737090] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695082, 'name': ReconfigVM_Task, 'duration_secs': 0.730991} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.737321] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 7c5d1740-92ba-4d4b-a557-10f8ea58e883/7c5d1740-92ba-4d4b-a557-10f8ea58e883.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1321.737875] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cf6f2c2-0051-4f1a-b9f0-0ba97755d30e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.746628] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1321.746628] env[62820]: value = "task-1695083" [ 1321.746628] env[62820]: _type = "Task" [ 1321.746628] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.756967] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695083, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.902875] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "043e14a3-df5a-4098-b147-c6460bb85423" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.903219] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "043e14a3-df5a-4098-b147-c6460bb85423" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.903663] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "043e14a3-df5a-4098-b147-c6460bb85423-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.903802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "043e14a3-df5a-4098-b147-c6460bb85423-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.903875] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "043e14a3-df5a-4098-b147-c6460bb85423-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.906052] env[62820]: INFO nova.compute.manager [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Terminating instance [ 1321.985750] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.986220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.986474] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.986669] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.986986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.989819] env[62820]: INFO nova.compute.manager [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Terminating instance [ 1322.133067] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e5fd35-3ade-7970-13fd-90ddfc1e369d, 'name': SearchDatastore_Task, 'duration_secs': 0.0511} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.133067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.133067] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1322.133067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.133472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.133472] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1322.133472] env[62820]: DEBUG nova.network.neutron [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Updated VIF entry in instance network info cache for port f8810d7c-99fa-4aca-b414-846eebdcd345. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1322.133578] env[62820]: DEBUG nova.network.neutron [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Updating instance_info_cache with network_info: [{"id": "f8810d7c-99fa-4aca-b414-846eebdcd345", "address": "fa:16:3e:fb:be:01", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8810d7c-99", "ovs_interfaceid": "f8810d7c-99fa-4aca-b414-846eebdcd345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.135113] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-471fff1a-5a3a-4004-9a47-15a30c217237 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.140997] env[62820]: ERROR nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [req-47a5b05c-f062-49f4-8f5d-00ff9e1c2a6a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-47a5b05c-f062-49f4-8f5d-00ff9e1c2a6a"}]} [ 1322.155130] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1322.155407] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1322.156193] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89385532-6557-465b-b20c-3a53b341c743 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.163774] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1322.167624] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1322.167624] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529746da-0398-dcaa-3d45-3d7991db4388" [ 1322.167624] env[62820]: _type = "Task" [ 1322.167624] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.180263] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529746da-0398-dcaa-3d45-3d7991db4388, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.180547] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1322.180646] env[62820]: DEBUG nova.compute.provider_tree [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1322.196240] env[62820]: DEBUG oslo_concurrency.lockutils [None req-932ea96d-ad0c-4054-8281-53630441d11e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.275s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.198432] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1322.222538] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1322.269405] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695083, 'name': Rename_Task, 'duration_secs': 0.502036} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.269405] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1322.269405] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a079725d-69d9-41c1-8b7e-7f798fb58137 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.281578] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1322.281578] env[62820]: value = "task-1695084" [ 1322.281578] env[62820]: _type = "Task" [ 1322.281578] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.292243] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.409765] env[62820]: DEBUG nova.compute.manager [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1322.410042] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1322.411805] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f9d70d-8d05-463f-8cc6-db7063ae8452 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.426858] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1322.426858] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-465dc024-c161-4716-81fd-485b2104bd63 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.435559] env[62820]: DEBUG oslo_vmware.api [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1322.435559] env[62820]: value = "task-1695085" [ 1322.435559] env[62820]: _type = "Task" [ 1322.435559] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.448987] env[62820]: DEBUG oslo_vmware.api [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695085, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.496039] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "refresh_cache-aacc6f1c-56d6-43b9-9c40-5ea49b40a657" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.496235] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquired lock "refresh_cache-aacc6f1c-56d6-43b9-9c40-5ea49b40a657" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.496544] env[62820]: DEBUG nova.network.neutron [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.644214] env[62820]: DEBUG oslo_concurrency.lockutils [req-2c250e05-12d5-42df-9871-ca08d3559242 req-d2422072-8c17-46e2-a3ca-fa13d83ab417 service nova] Releasing lock "refresh_cache-9910a0ea-5ce0-41e9-b449-da729a4c3223" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.679362] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529746da-0398-dcaa-3d45-3d7991db4388, 'name': SearchDatastore_Task, 'duration_secs': 0.017811} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.680331] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76367046-52b0-4870-8168-729cbfe8d549 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.689683] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1322.689683] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c9a0b3-1e3d-0098-e143-10d67e834387" [ 1322.689683] env[62820]: _type = "Task" [ 1322.689683] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.703608] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1322.706217] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c9a0b3-1e3d-0098-e143-10d67e834387, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.736740] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd976847-39d8-4824-8c69-d94cf6f73dd6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.748492] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccbbf89-9e49-48cd-a0c3-11d5bef687d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.789436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e62faaf-8b0c-410c-a8e6-5952b31f3dd7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.800016] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695084, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.810349] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ac0cc4-f331-424f-89a7-a0906dc0f824 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.821218] env[62820]: DEBUG nova.compute.provider_tree [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1322.902804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.902804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.902804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.902804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.902971] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.907843] env[62820]: INFO nova.compute.manager [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Terminating instance [ 1322.946613] env[62820]: DEBUG nova.compute.manager [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Received event network-changed-262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1322.947792] env[62820]: DEBUG nova.compute.manager [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Refreshing instance network info cache due to event network-changed-262a6e93-a27f-4189-9a88-cb1c5fe97709. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1322.947792] env[62820]: DEBUG oslo_concurrency.lockutils [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] Acquiring lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.947792] env[62820]: DEBUG oslo_concurrency.lockutils [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] Acquired lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.947969] env[62820]: DEBUG nova.network.neutron [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Refreshing network info cache for port 262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1322.954312] env[62820]: DEBUG oslo_vmware.api [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695085, 'name': PowerOffVM_Task, 'duration_secs': 0.269742} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.955456] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1322.955456] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1322.955456] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e25961f9-03e6-440f-8ed2-4a11dc781ba9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.030476] env[62820]: DEBUG nova.network.neutron [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1323.172088] env[62820]: DEBUG nova.network.neutron [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.191410] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1323.191664] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1323.191871] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Deleting the datastore file [datastore1] 043e14a3-df5a-4098-b147-c6460bb85423 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1323.196178] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba0a5ad6-9cb2-4a1e-9dfd-6a439c111f0e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.208039] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': 
session[5263da33-e147-45e9-71e6-fd449b37f057]52c9a0b3-1e3d-0098-e143-10d67e834387, 'name': SearchDatastore_Task, 'duration_secs': 0.019456} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.209348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.209348] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9910a0ea-5ce0-41e9-b449-da729a4c3223/9910a0ea-5ce0-41e9-b449-da729a4c3223.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1323.209576] env[62820]: DEBUG oslo_vmware.api [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for the task: (returnval){ [ 1323.209576] env[62820]: value = "task-1695087" [ 1323.209576] env[62820]: _type = "Task" [ 1323.209576] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.209709] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8cee34e-2c96-4b58-971b-f904f201f88a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.227766] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1323.227766] env[62820]: value = "task-1695088" [ 1323.227766] env[62820]: _type = "Task" [ 1323.227766] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.239057] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.240296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.297514] env[62820]: DEBUG oslo_vmware.api [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695084, 'name': PowerOnVM_Task, 'duration_secs': 0.662033} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.297691] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1323.297918] env[62820]: INFO nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Took 12.10 seconds to spawn the instance on the hypervisor. [ 1323.298116] env[62820]: DEBUG nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1323.298959] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5433a6-a088-405e-b43d-edfc1aa31462 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.359868] env[62820]: DEBUG nova.scheduler.client.report [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 35 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1323.362092] env[62820]: DEBUG nova.compute.provider_tree [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 35 to 36 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1323.362092] env[62820]: DEBUG 
nova.compute.provider_tree [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1323.413641] env[62820]: DEBUG nova.compute.manager [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1323.413641] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.414165] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d89539-55f2-4842-b38a-f04be1c8360b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.428309] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.428615] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aece5081-d253-4612-aef7-9b62adc27eca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.437984] env[62820]: DEBUG oslo_vmware.api [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1323.437984] env[62820]: value = "task-1695089" [ 1323.437984] env[62820]: _type = "Task" [ 1323.437984] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.449760] env[62820]: DEBUG oslo_vmware.api [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695089, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.507712] env[62820]: DEBUG nova.compute.manager [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-vif-plugged-338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1323.507966] env[62820]: DEBUG oslo_concurrency.lockutils [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] Acquiring lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.508201] env[62820]: DEBUG oslo_concurrency.lockutils [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.508378] env[62820]: DEBUG oslo_concurrency.lockutils [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.508555] env[62820]: DEBUG nova.compute.manager [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] No waiting events found dispatching network-vif-plugged-338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1323.508719] env[62820]: WARNING nova.compute.manager [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received unexpected event network-vif-plugged-338a4d2a-de27-4e93-bf11-7c91765295a4 for instance with vm_state building and task_state spawning. [ 1323.508877] env[62820]: DEBUG nova.compute.manager [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-changed-338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1323.509119] env[62820]: DEBUG nova.compute.manager [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Refreshing instance network info cache due to event network-changed-338a4d2a-de27-4e93-bf11-7c91765295a4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1323.509337] env[62820]: DEBUG oslo_concurrency.lockutils [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] Acquiring lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.509477] env[62820]: DEBUG oslo_concurrency.lockutils [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] Acquired lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.509748] env[62820]: DEBUG nova.network.neutron [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Refreshing network info cache for port 338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.676119] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Releasing lock "refresh_cache-aacc6f1c-56d6-43b9-9c40-5ea49b40a657" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.678602] env[62820]: DEBUG nova.compute.manager [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1323.678602] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.678602] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e685ac-0b94-4382-bd92-c903e7600721 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.699460] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.699823] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9575ab0a-c13c-4200-bbd3-a6d7b59723a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.711067] env[62820]: DEBUG oslo_vmware.api [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1323.711067] env[62820]: value = "task-1695090" [ 1323.711067] env[62820]: _type = "Task" [ 1323.711067] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.726849] env[62820]: DEBUG oslo_vmware.api [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Task: {'id': task-1695087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203971} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.731190] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1323.731190] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1323.731190] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1323.731418] env[62820]: INFO nova.compute.manager [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1323.732403] env[62820]: DEBUG oslo.service.loopingcall [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1323.732403] env[62820]: DEBUG oslo_vmware.api [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695090, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.732403] env[62820]: DEBUG nova.compute.manager [-] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1323.733402] env[62820]: DEBUG nova.network.neutron [-] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1323.749445] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695088, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.821028] env[62820]: INFO nova.compute.manager [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Took 28.19 seconds to build instance. [ 1323.869131] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.956s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.869131] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1323.872208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.072s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.872208] env[62820]: DEBUG nova.objects.instance [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lazy-loading 'resources' on Instance uuid 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1323.958470] env[62820]: DEBUG oslo_vmware.api [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695089, 'name': PowerOffVM_Task, 'duration_secs': 0.341712} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.958803] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1323.958977] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1323.959249] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4870a771-8118-4c6d-9de3-35b1686b2237 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.047098] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.047436] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.047608] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Deleting the datastore file [datastore1] b3d1f811-1d28-40f7-8bf8-c29eb64896c0 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.048102] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a20147cd-0b5d-48e3-8e0b-9380023e0f77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.056174] env[62820]: DEBUG oslo_vmware.api [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for the task: (returnval){ [ 1324.056174] env[62820]: value = "task-1695092" [ 1324.056174] env[62820]: _type = "Task" [ 1324.056174] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.067597] env[62820]: DEBUG oslo_vmware.api [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695092, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.221332] env[62820]: DEBUG oslo_vmware.api [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695090, 'name': PowerOffVM_Task, 'duration_secs': 0.31336} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.221638] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1324.221767] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1324.222027] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9bd7842-e1ac-4339-a0cd-446ca04818e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.224264] env[62820]: DEBUG nova.network.neutron [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.241690] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597192} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.241959] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9910a0ea-5ce0-41e9-b449-da729a4c3223/9910a0ea-5ce0-41e9-b449-da729a4c3223.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1324.242192] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1324.242600] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3057cba4-993d-45c8-ae99-f6988aa1f6c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.250291] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1324.250291] env[62820]: value = "task-1695094" [ 1324.250291] env[62820]: _type = "Task" [ 1324.250291] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.255705] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.255996] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.256224] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Deleting the datastore file [datastore1] aacc6f1c-56d6-43b9-9c40-5ea49b40a657 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.257197] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e062777-1459-4911-b475-335c739320b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.262495] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695094, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.268022] env[62820]: DEBUG oslo_vmware.api [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for the task: (returnval){ [ 1324.268022] env[62820]: value = "task-1695095" [ 1324.268022] env[62820]: _type = "Task" [ 1324.268022] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.276944] env[62820]: DEBUG oslo_vmware.api [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.322734] env[62820]: DEBUG oslo_concurrency.lockutils [None req-71b7fa50-092d-4537-96ca-dce40757fd03 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.710s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.374912] env[62820]: DEBUG nova.compute.utils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1324.381247] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1324.381247] env[62820]: DEBUG nova.network.neutron [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1324.386079] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Successfully updated port: 7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1324.485511] env[62820]: DEBUG nova.network.neutron [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updated VIF entry in instance network info cache for port 262a6e93-a27f-4189-9a88-cb1c5fe97709. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.485892] env[62820]: DEBUG nova.network.neutron [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updating instance_info_cache with network_info: [{"id": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "address": "fa:16:3e:68:b2:c4", "network": {"id": "a059b348-19da-48d1-baa7-5bf2e657f086", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1831702168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7518fa7b0f743ccaa0a14aee92b88fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262a6e93-a2", "ovs_interfaceid": "262a6e93-a27f-4189-9a88-cb1c5fe97709", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.517317] env[62820]: DEBUG nova.network.neutron [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.546573] env[62820]: DEBUG nova.policy [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a09fea1459b84da2b816c77b02b8558a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b34ef1834bd54b64b7448e5a85ef978e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1324.573073] env[62820]: DEBUG oslo_vmware.api [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Task: {'id': task-1695092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348882} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.573073] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1324.573073] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1324.573226] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1324.573891] env[62820]: INFO nova.compute.manager [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1324.573891] env[62820]: DEBUG oslo.service.loopingcall [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.573891] env[62820]: DEBUG nova.compute.manager [-] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1324.573891] env[62820]: DEBUG nova.network.neutron [-] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1324.768255] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073747} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.773593] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1324.782240] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f078441b-05bf-4c29-bb74-1684a4f3c412 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.807461] env[62820]: DEBUG oslo_vmware.api [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Task: {'id': task-1695095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3972} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.824786] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 9910a0ea-5ce0-41e9-b449-da729a4c3223/9910a0ea-5ce0-41e9-b449-da729a4c3223.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1324.828899] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1324.829196] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1324.829549] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1324.829691] env[62820]: INFO nova.compute.manager [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1324.830026] env[62820]: DEBUG oslo.service.loopingcall [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.830812] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1324.834613] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28e718b8-62c5-43d6-9bad-5681d64acc23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.861900] env[62820]: DEBUG nova.compute.manager [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1324.862076] env[62820]: DEBUG nova.network.neutron [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1324.883578] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1324.891702] env[62820]: DEBUG nova.network.neutron [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.897390] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.900421] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1324.900421] env[62820]: value = "task-1695096" [ 1324.900421] env[62820]: _type = "Task" [ 1324.900421] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.923414] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695096, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.975975] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5345f50-c9d1-4746-899f-7b9d230bc53f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.989597] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf3b1a1-4ac4-4b2d-8180-fbe4d1132d81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.996243] env[62820]: DEBUG oslo_concurrency.lockutils [req-a61ade3a-9c31-487f-843d-0873933ed33e req-89cf09c5-3fa3-4c78-bc8e-f526cad998d9 service nova] Releasing lock "refresh_cache-90ea0c16-739a-4132-ac36-e154a846b9c2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.027407] env[62820]: DEBUG oslo_concurrency.lockutils [req-79b8a27b-b3b4-45cb-805f-577df9d6f495 req-f0a78e26-b8cd-4b02-9ee6-a4ed0493a93d service nova] Releasing lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.027407] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.027407] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1325.029352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e0f0ac-fd66-40ac-bcbf-85d43ed81bbf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.041619] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c0dde5-9c14-45c3-8366-e36f72f78f75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.062281] env[62820]: DEBUG nova.compute.provider_tree [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.261442] env[62820]: DEBUG nova.network.neutron [-] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.362536] env[62820]: DEBUG nova.network.neutron [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Successfully created port: 71bc98ce-e716-4517-ade6-5d17b8a032e9 
{{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1325.389985] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.401740] env[62820]: DEBUG nova.network.neutron [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.415410] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695096, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.568128] env[62820]: DEBUG nova.scheduler.client.report [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1325.681448] env[62820]: DEBUG nova.network.neutron [-] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.766099] env[62820]: INFO nova.compute.manager [-] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Took 2.03 seconds to deallocate network for instance. [ 1325.854180] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1325.903530] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1325.909030] env[62820]: INFO nova.compute.manager [-] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Took 1.04 seconds to deallocate network for instance. [ 1325.919960] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695096, 'name': ReconfigVM_Task, 'duration_secs': 0.716427} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.920988] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 9910a0ea-5ce0-41e9-b449-da729a4c3223/9910a0ea-5ce0-41e9-b449-da729a4c3223.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.921370] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b2aea00-8ab7-4552-91b2-2fe7fcc1adb0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.931538] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1325.931538] env[62820]: value = "task-1695097" [ 1325.931538] env[62820]: _type = "Task" [ 1325.931538] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.948898] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695097, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.954659] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1325.955419] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1325.955419] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.955419] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 
tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1325.955742] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.955742] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1325.956022] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1325.956427] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1325.956427] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1325.956427] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1325.957588] env[62820]: DEBUG nova.virt.hardware [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1325.957848] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b0d91e-79b1-4bc0-8a8f-1b67eb3e7734 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.972285] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1358c5f-0cc4-46cf-a9d8-2553d2fec7ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.009831] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.009831] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.030064] env[62820]: DEBUG nova.compute.manager [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-vif-plugged-7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1326.030296] env[62820]: DEBUG oslo_concurrency.lockutils [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] Acquiring lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.030495] env[62820]: DEBUG oslo_concurrency.lockutils [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.030656] env[62820]: DEBUG oslo_concurrency.lockutils [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.030821] env[62820]: DEBUG nova.compute.manager [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] No waiting events found dispatching network-vif-plugged-7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1326.031071] env[62820]: WARNING nova.compute.manager [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received unexpected event network-vif-plugged-7da55fa7-efc1-42e6-a489-fad614ea19e4 for instance with vm_state building and task_state spawning. 
[ 1326.033236] env[62820]: DEBUG nova.compute.manager [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-changed-7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1326.033435] env[62820]: DEBUG nova.compute.manager [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Refreshing instance network info cache due to event network-changed-7da55fa7-efc1-42e6-a489-fad614ea19e4. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1326.033616] env[62820]: DEBUG oslo_concurrency.lockutils [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] Acquiring lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.076498] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.078878] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.883s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.080438] env[62820]: INFO nova.compute.claims [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1326.106258] env[62820]: INFO nova.scheduler.client.report [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Deleted allocations for instance 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111 [ 1326.185956] env[62820]: INFO nova.compute.manager [-] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Took 1.61 seconds to deallocate network for instance. 
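The provider inventory payloads reported earlier and the "Claim successful on node ..." entry above follow the standard Placement capacity convention: schedulable capacity per resource class is (total - reserved) * allocation_ratio. A minimal standalone sketch using the figures logged for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (illustrative script, not Nova code):

    # Inventory as reported to Placement in the log entries above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400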
[ 1326.274052] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.421979] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.448846] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695097, 'name': Rename_Task, 'duration_secs': 0.259632} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.449157] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1326.449795] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b22d706-aefd-407c-b357-bf22cfe8e9d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.459133] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1326.459133] env[62820]: value = "task-1695098" [ 1326.459133] env[62820]: _type = "Task" [ 1326.459133] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.467203] env[62820]: DEBUG nova.compute.manager [req-0173bb88-6544-4b8c-bf94-8445be312409 req-f28c0c30-7d21-437e-87d8-fdf243eb9b40 service nova] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Received event network-vif-deleted-c4c9e7df-633c-43a0-bfc5-21a7a40c4a55 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1326.467448] env[62820]: DEBUG nova.compute.manager [req-0173bb88-6544-4b8c-bf94-8445be312409 req-f28c0c30-7d21-437e-87d8-fdf243eb9b40 service nova] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Received event network-vif-deleted-cc33796e-572d-47c4-99e7-77f5cff4a281 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1326.481055] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.617033] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0fcd1bb4-2dcc-49b1-8ea5-c622547be9d2 tempest-DeleteServersAdminTestJSON-1003419482 tempest-DeleteServersAdminTestJSON-1003419482-project-admin] Lock "3c5f66f1-c4e4-4ffd-8979-f7f828dc7111" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.935s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.696845] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.971029] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.108031] env[62820]: DEBUG nova.network.neutron [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Updating instance_info_cache with network_info: [{"id": "9002583e-7d52-45aa-bb62-0eef82acb545", "address": "fa:16:3e:d6:41:83", "network": {"id": "b171b37c-048f-47ea-89e7-5b448f338c6c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1041085922", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9002583e-7d", "ovs_interfaceid": "9002583e-7d52-45aa-bb62-0eef82acb545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "338a4d2a-de27-4e93-bf11-7c91765295a4", "address": "fa:16:3e:b4:df:59", "network": {"id": "a9792575-7ade-4125-8121-e997d91e2b3e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1569422399", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "1430a695-49fb-4905-bc38-db9b869a1a9d", "external-id": "nsx-vlan-transportzone-297", "segmentation_id": 297, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap338a4d2a-de", "ovs_interfaceid": "338a4d2a-de27-4e93-bf11-7c91765295a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7da55fa7-efc1-42e6-a489-fad614ea19e4", "address": "fa:16:3e:43:d0:03", "network": {"id": "b171b37c-048f-47ea-89e7-5b448f338c6c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1041085922", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da55fa7-ef", "ovs_interfaceid": "7da55fa7-efc1-42e6-a489-fad614ea19e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.475549] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.568627] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9487888e-93c6-48ad-b8b0-51dce184ca1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.582021] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e281f6-7441-4073-a95f-b085e323dd93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.623331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Releasing lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.623331] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance network_info: |[{"id": "9002583e-7d52-45aa-bb62-0eef82acb545", "address": "fa:16:3e:d6:41:83", "network": {"id": "b171b37c-048f-47ea-89e7-5b448f338c6c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1041085922", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9002583e-7d", "ovs_interfaceid": "9002583e-7d52-45aa-bb62-0eef82acb545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "338a4d2a-de27-4e93-bf11-7c91765295a4", "address": "fa:16:3e:b4:df:59", "network": {"id": "a9792575-7ade-4125-8121-e997d91e2b3e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1569422399", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1430a695-49fb-4905-bc38-db9b869a1a9d", "external-id": "nsx-vlan-transportzone-297", "segmentation_id": 297, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap338a4d2a-de", "ovs_interfaceid": "338a4d2a-de27-4e93-bf11-7c91765295a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7da55fa7-efc1-42e6-a489-fad614ea19e4", "address": "fa:16:3e:43:d0:03", "network": {"id": "b171b37c-048f-47ea-89e7-5b448f338c6c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1041085922", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da55fa7-ef", "ovs_interfaceid": "7da55fa7-efc1-42e6-a489-fad614ea19e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1327.623713] env[62820]: DEBUG oslo_concurrency.lockutils [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] Acquired lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.625145] env[62820]: DEBUG nova.network.neutron [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Refreshing network info cache for port 7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1327.625425] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:41:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9002583e-7d52-45aa-bb62-0eef82acb545', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:df:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1430a695-49fb-4905-bc38-db9b869a1a9d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '338a4d2a-de27-4e93-bf11-7c91765295a4', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:d0:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7da55fa7-efc1-42e6-a489-fad614ea19e4', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.644842] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Creating folder: Project (a069009bc6a741379effea7b50d9e1c3). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1327.644842] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42c0002-15a7-4eff-af4f-53ed838689bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.645275] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16ef6ecb-d8c1-4e4c-9e23-aa0b53de34b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.659022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c35f320-95e8-4f82-9bfa-c1f4c49cbb72 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.665264] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Created folder: Project (a069009bc6a741379effea7b50d9e1c3) in parent group-v353379. [ 1327.665264] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Creating folder: Instances. Parent ref: group-v353416. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1327.666397] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef4591eb-aaa3-4789-adbc-829c065b87f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.681644] env[62820]: DEBUG nova.compute.provider_tree [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.684345] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Created folder: Instances in parent group-v353416. [ 1327.684580] env[62820]: DEBUG oslo.service.loopingcall [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.685244] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.685342] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64ffad1d-248e-4665-9ead-c5ec878e246d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.712494] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.712494] env[62820]: value = "task-1695101" [ 1327.712494] env[62820]: _type = "Task" [ 1327.712494] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.725070] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695101, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.855557] env[62820]: DEBUG nova.network.neutron [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Successfully updated port: 71bc98ce-e716-4517-ade6-5d17b8a032e9 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1327.972162] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.187735] env[62820]: DEBUG nova.scheduler.client.report [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1328.226956] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.227230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.232395] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695101, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.363249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "refresh_cache-9287b8eb-487d-4f51-9e7c-90c016a1c8e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.363249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired lock "refresh_cache-9287b8eb-487d-4f51-9e7c-90c016a1c8e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.363249] env[62820]: DEBUG nova.network.neutron [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.477490] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.671909] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "9068670d-f323-4180-92f9-f19737e955e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.671909] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "9068670d-f323-4180-92f9-f19737e955e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.698019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.698019] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1328.699481] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.736s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.702058] env[62820]: INFO nova.compute.claims [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1328.718176] env[62820]: DEBUG nova.network.neutron [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Updated VIF entry in instance network info cache for port 7da55fa7-efc1-42e6-a489-fad614ea19e4. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.718641] env[62820]: DEBUG nova.network.neutron [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Updating instance_info_cache with network_info: [{"id": "9002583e-7d52-45aa-bb62-0eef82acb545", "address": "fa:16:3e:d6:41:83", "network": {"id": "b171b37c-048f-47ea-89e7-5b448f338c6c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1041085922", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9002583e-7d", "ovs_interfaceid": "9002583e-7d52-45aa-bb62-0eef82acb545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "338a4d2a-de27-4e93-bf11-7c91765295a4", "address": "fa:16:3e:b4:df:59", "network": {"id": "a9792575-7ade-4125-8121-e997d91e2b3e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1569422399", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1430a695-49fb-4905-bc38-db9b869a1a9d", "external-id": "nsx-vlan-transportzone-297", "segmentation_id": 297, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap338a4d2a-de", "ovs_interfaceid": "338a4d2a-de27-4e93-bf11-7c91765295a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7da55fa7-efc1-42e6-a489-fad614ea19e4", "address": "fa:16:3e:43:d0:03", "network": {"id": "b171b37c-048f-47ea-89e7-5b448f338c6c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1041085922", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da55fa7-ef", "ovs_interfaceid": "7da55fa7-efc1-42e6-a489-fad614ea19e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.734559] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695101, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.901981] env[62820]: DEBUG nova.network.neutron [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1328.907796] env[62820]: DEBUG nova.compute.manager [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Received event network-vif-plugged-71bc98ce-e716-4517-ade6-5d17b8a032e9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1328.907796] env[62820]: DEBUG oslo_concurrency.lockutils [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] Acquiring lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.907941] env[62820]: DEBUG oslo_concurrency.lockutils [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.908491] env[62820]: DEBUG oslo_concurrency.lockutils [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.908491] env[62820]: DEBUG nova.compute.manager [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] No waiting events found dispatching network-vif-plugged-71bc98ce-e716-4517-ade6-5d17b8a032e9 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1328.908800] env[62820]: WARNING nova.compute.manager [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Received unexpected event network-vif-plugged-71bc98ce-e716-4517-ade6-5d17b8a032e9 for instance with vm_state building and task_state spawning. [ 1328.908862] env[62820]: DEBUG nova.compute.manager [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Received event network-changed-71bc98ce-e716-4517-ade6-5d17b8a032e9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1328.909014] env[62820]: DEBUG nova.compute.manager [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Refreshing instance network info cache due to event network-changed-71bc98ce-e716-4517-ade6-5d17b8a032e9. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1328.909244] env[62820]: DEBUG oslo_concurrency.lockutils [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] Acquiring lock "refresh_cache-9287b8eb-487d-4f51-9e7c-90c016a1c8e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.973094] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.079188] env[62820]: DEBUG nova.network.neutron [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Updating instance_info_cache with network_info: [{"id": "71bc98ce-e716-4517-ade6-5d17b8a032e9", "address": "fa:16:3e:c2:a4:6d", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71bc98ce-e7", "ovs_interfaceid": "71bc98ce-e716-4517-ade6-5d17b8a032e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.178635] env[62820]: DEBUG nova.compute.manager [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Received event network-changed-0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1329.178771] env[62820]: DEBUG nova.compute.manager [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Refreshing instance network info cache due to event network-changed-0e52122a-94ee-4e33-92b4-777d631cef4b. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1329.179124] env[62820]: DEBUG oslo_concurrency.lockutils [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] Acquiring lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.179286] env[62820]: DEBUG oslo_concurrency.lockutils [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] Acquired lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.180116] env[62820]: DEBUG nova.network.neutron [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Refreshing network info cache for port 0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1329.209021] env[62820]: DEBUG nova.compute.utils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1329.211597] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1329.211809] env[62820]: DEBUG nova.network.neutron [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1329.226393] env[62820]: DEBUG oslo_concurrency.lockutils [req-5324ddde-a276-4bc1-8048-8f53d8376a0e req-7a401ed3-123e-4cf8-a16e-7688b614a6b7 service nova] Releasing lock "refresh_cache-b7c52283-eada-47fd-887f-a5ad94a0583a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.235192] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695101, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.269020] env[62820]: DEBUG nova.policy [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb686f74748f456c9cd2d5dce4993280', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2c169e476764215ba27ba5b0381e8e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1329.476947] env[62820]: DEBUG oslo_vmware.api [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695098, 'name': PowerOnVM_Task, 'duration_secs': 2.836637} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.480494] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1329.480772] env[62820]: INFO nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Took 13.17 seconds to spawn the instance on the hypervisor. 
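The PowerOnVM_Task progress and completion lines above follow oslo.vmware's task-polling pattern. A hedged sketch of that call sequence; the session and VM reference names are placeholders, not values from this log:

```python
# Hedged sketch of the oslo.vmware task-polling pattern seen in the
# PowerOnVM_Task lines above; `session` and `vm_ref` are placeholders.
from oslo_vmware import api


def power_on(session: api.VMwareAPISession, vm_ref):
    """Invoke PowerOnVM_Task and block until vCenter reports completion.

    wait_for_task() polls the task and logs lines such as
    "progress is N%" and "completed successfully".
    """
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)
```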
[ 1329.481395] env[62820]: DEBUG nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1329.486699] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070ae50c-c124-4c4b-bf95-65b54512986e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.491633] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.491633] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.585399] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Releasing lock "refresh_cache-9287b8eb-487d-4f51-9e7c-90c016a1c8e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.585821] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Instance network_info: |[{"id": "71bc98ce-e716-4517-ade6-5d17b8a032e9", "address": "fa:16:3e:c2:a4:6d", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71bc98ce-e7", "ovs_interfaceid": "71bc98ce-e716-4517-ade6-5d17b8a032e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1329.586213] env[62820]: DEBUG oslo_concurrency.lockutils 
[req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] Acquired lock "refresh_cache-9287b8eb-487d-4f51-9e7c-90c016a1c8e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.586536] env[62820]: DEBUG nova.network.neutron [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Refreshing network info cache for port 71bc98ce-e716-4517-ade6-5d17b8a032e9 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1329.589922] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:a4:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71bc98ce-e716-4517-ade6-5d17b8a032e9', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.595848] env[62820]: DEBUG oslo.service.loopingcall [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.596681] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.596916] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe98001d-0c3f-4e06-bd38-17c53bd3c835 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.621011] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.621011] env[62820]: value = "task-1695102" [ 1329.621011] env[62820]: _type = "Task" [ 1329.621011] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.640670] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695102, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.716273] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1329.740624] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695101, 'name': CreateVM_Task, 'duration_secs': 1.703552} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.740624] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1329.742180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.742180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.742390] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1329.742706] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-584859a9-d770-4569-b46c-17d8340df356 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.749330] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1329.749330] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520dca8d-f64f-e809-f0f5-8d81725e6690" [ 1329.749330] env[62820]: _type = "Task" [ 1329.749330] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.771113] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520dca8d-f64f-e809-f0f5-8d81725e6690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.898790] env[62820]: DEBUG nova.network.neutron [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Successfully created port: 712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1330.023768] env[62820]: INFO nova.compute.manager [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Took 31.27 seconds to build instance. 
[ 1330.030921] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.031805] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.137102] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695102, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.261771] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520dca8d-f64f-e809-f0f5-8d81725e6690, 'name': SearchDatastore_Task, 'duration_secs': 0.020172} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.262103] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.262361] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1330.262606] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.262759] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.262956] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.267037] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92f066f7-c670-4e68-b03b-51feae9b39af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.279784] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.279784] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1330.280221] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99da4c10-10f5-4047-8230-2eb8ccfedaca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.294120] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1330.294120] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c1ad32-fcb1-41cc-78d1-b636e3d8ea35" [ 1330.294120] env[62820]: _type = "Task" [ 1330.294120] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.304726] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c1ad32-fcb1-41cc-78d1-b636e3d8ea35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.323884] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2307a43b-4e4a-4cf7-9f09-275b8753f836 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.333067] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2bb570-1291-469f-8f2f-8c9fdd18a110 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.342830] env[62820]: DEBUG nova.network.neutron [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updated VIF entry in instance network info cache for port 0e52122a-94ee-4e33-92b4-777d631cef4b. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.343275] env[62820]: DEBUG nova.network.neutron [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updating instance_info_cache with network_info: [{"id": "0e52122a-94ee-4e33-92b4-777d631cef4b", "address": "fa:16:3e:5d:f4:98", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e52122a-94", "ovs_interfaceid": "0e52122a-94ee-4e33-92b4-777d631cef4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.373036] env[62820]: DEBUG oslo_concurrency.lockutils [req-b13e23ed-5cf9-4224-a020-a2e9bfdae879 req-ea526e16-8d2c-4837-aad6-2d484357b4bc service nova] Releasing lock "refresh_cache-2f917745-28ef-4dfe-8c09-45c15a80145d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.373948] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf82acd-e1a9-4bc9-be7f-ab81c2fb73ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.386735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cb1675-7c57-4e7e-b2bd-c9118d4df777 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.407025] env[62820]: DEBUG nova.compute.provider_tree [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1330.531823] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a441cd1e-ac0e-4aff-8fd1-d27d53b321e8 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.457s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.635143] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695102, 'name': CreateVM_Task, 
'duration_secs': 0.676576} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.635418] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.636226] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.636447] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.636792] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.637126] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8590436b-8c48-456f-879a-b54c4517476c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.647650] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1330.647650] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5298d9ec-d27c-08a3-36fa-111b6716ced3" [ 1330.647650] env[62820]: _type = "Task" [ 1330.647650] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.655838] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5298d9ec-d27c-08a3-36fa-111b6716ced3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.737923] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1330.765018] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1330.766323] env[62820]: DEBUG 
nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1330.766659] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1330.766997] env[62820]: DEBUG nova.virt.hardware [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1330.770022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646d3003-5a64-49f1-b37f-b19943cc1590 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.778270] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211ab415-9a91-4ae9-96bf-d1a50c02b3c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.811174] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c1ad32-fcb1-41cc-78d1-b636e3d8ea35, 'name': SearchDatastore_Task, 'duration_secs': 0.031861} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.811174] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa40587c-bd48-453a-bceb-75aacf87664a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.818222] env[62820]: DEBUG nova.network.neutron [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Updated VIF entry in instance network info cache for port 71bc98ce-e716-4517-ade6-5d17b8a032e9. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.818623] env[62820]: DEBUG nova.network.neutron [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Updating instance_info_cache with network_info: [{"id": "71bc98ce-e716-4517-ade6-5d17b8a032e9", "address": "fa:16:3e:c2:a4:6d", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71bc98ce-e7", "ovs_interfaceid": "71bc98ce-e716-4517-ade6-5d17b8a032e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.824731] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1330.824731] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520e18ca-f30b-a458-43a1-4ee784254138" [ 1330.824731] env[62820]: _type = "Task" [ 1330.824731] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.834209] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520e18ca-f30b-a458-43a1-4ee784254138, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.909288] env[62820]: DEBUG nova.scheduler.client.report [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1331.035784] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1331.162652] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5298d9ec-d27c-08a3-36fa-111b6716ced3, 'name': SearchDatastore_Task, 'duration_secs': 0.013226} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.163189] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.163644] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.164150] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.321479] env[62820]: DEBUG oslo_concurrency.lockutils [req-3dfb363c-6c0f-4460-832f-8c1672d1c5c8 req-dc8537be-9399-4f86-b220-645029e93bf7 service nova] Releasing lock "refresh_cache-9287b8eb-487d-4f51-9e7c-90c016a1c8e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.340567] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520e18ca-f30b-a458-43a1-4ee784254138, 'name': SearchDatastore_Task, 'duration_secs': 0.043377} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.340884] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.341190] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b7c52283-eada-47fd-887f-a5ad94a0583a/b7c52283-eada-47fd-887f-a5ad94a0583a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.341538] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.341772] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.342048] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1daca7a0-0b6b-4b0f-a682-38aa48fd2ec0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.345830] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb223d68-10b6-4582-95b1-882af3195fd4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.355887] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1331.355887] env[62820]: value = "task-1695103" [ 1331.355887] env[62820]: _type = "Task" [ 1331.355887] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.358561] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.358561] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.361951] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94fe44c6-7e5f-4d00-b272-b82af26e2238 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.369225] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1331.369225] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522d2d88-1cf3-1216-7952-c9057f99f867" [ 1331.369225] env[62820]: _type = "Task" [ 1331.369225] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.374591] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695103, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.382201] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522d2d88-1cf3-1216-7952-c9057f99f867, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.416324] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.717s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.416937] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1331.421359] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.251s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.423256] env[62820]: INFO nova.compute.claims [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.565352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.650911] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.650911] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.869106] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695103, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.884574] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522d2d88-1cf3-1216-7952-c9057f99f867, 'name': SearchDatastore_Task, 'duration_secs': 0.012383} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.885243] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b061edb-c750-4765-8624-1fe73218b282 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.895772] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1331.895772] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523016f8-81bc-f3c6-28a0-26c5d13c62cc" [ 1331.895772] env[62820]: _type = "Task" [ 1331.895772] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.907053] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523016f8-81bc-f3c6-28a0-26c5d13c62cc, 'name': SearchDatastore_Task} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.907936] env[62820]: DEBUG nova.network.neutron [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Successfully updated port: 712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1331.912989] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.913297] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9287b8eb-487d-4f51-9e7c-90c016a1c8e2/9287b8eb-487d-4f51-9e7c-90c016a1c8e2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.913916] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6af3340-6f20-4363-a7fa-be8d3fca2467 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.924230] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1331.924230] env[62820]: value = "task-1695104" [ 1331.924230] env[62820]: _type = "Task" [ 1331.924230] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.933463] env[62820]: DEBUG nova.compute.utils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1331.936485] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1331.936485] env[62820]: DEBUG nova.network.neutron [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1331.947500] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695104, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.988597] env[62820]: DEBUG nova.policy [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '105864a2c6a242a08cf4d38ba93a88cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fec28e0df25f4d18b8d707ba9849e098', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1331.995300] env[62820]: DEBUG nova.compute.manager [req-885bf0b0-6779-4f88-8db0-4d8735acc6b9 req-53c4177b-a45b-4035-9fb7-df8be159ec24 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Received event network-vif-plugged-712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1331.995300] env[62820]: DEBUG oslo_concurrency.lockutils [req-885bf0b0-6779-4f88-8db0-4d8735acc6b9 req-53c4177b-a45b-4035-9fb7-df8be159ec24 service nova] Acquiring lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.995300] env[62820]: DEBUG oslo_concurrency.lockutils [req-885bf0b0-6779-4f88-8db0-4d8735acc6b9 req-53c4177b-a45b-4035-9fb7-df8be159ec24 service nova] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.995300] env[62820]: DEBUG oslo_concurrency.lockutils [req-885bf0b0-6779-4f88-8db0-4d8735acc6b9 
req-53c4177b-a45b-4035-9fb7-df8be159ec24 service nova] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.996345] env[62820]: DEBUG nova.compute.manager [req-885bf0b0-6779-4f88-8db0-4d8735acc6b9 req-53c4177b-a45b-4035-9fb7-df8be159ec24 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] No waiting events found dispatching network-vif-plugged-712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1331.996679] env[62820]: WARNING nova.compute.manager [req-885bf0b0-6779-4f88-8db0-4d8735acc6b9 req-53c4177b-a45b-4035-9fb7-df8be159ec24 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Received unexpected event network-vif-plugged-712e639f-2aff-4915-9285-ea3d67b8e072 for instance with vm_state building and task_state spawning. [ 1332.380507] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695103, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559309} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.380507] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b7c52283-eada-47fd-887f-a5ad94a0583a/b7c52283-eada-47fd-887f-a5ad94a0583a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.380507] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.380507] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-062599d6-650d-4db6-85e9-daaf78c35994 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.391039] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1332.391039] env[62820]: value = "task-1695105" [ 1332.391039] env[62820]: _type = "Task" [ 1332.391039] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.404579] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695105, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.416989] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "refresh_cache-846e8df9-b925-4d2e-a90e-4e774c35d0b4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.416989] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquired lock "refresh_cache-846e8df9-b925-4d2e-a90e-4e774c35d0b4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.416989] env[62820]: DEBUG nova.network.neutron [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1332.424230] env[62820]: DEBUG nova.network.neutron [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Successfully created port: b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1332.436587] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1332.452358] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695104, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.680093] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.901532] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16208} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.904230] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1332.905691] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c828fbe-081e-48e2-932b-17fd6ff7ab7c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.937546] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] b7c52283-eada-47fd-887f-a5ad94a0583a/b7c52283-eada-47fd-887f-a5ad94a0583a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1332.940642] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8c59b24-c823-4d57-8f6d-bb1839613798 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.977789] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1332.977789] env[62820]: value = "task-1695106" [ 1332.977789] env[62820]: _type = "Task" [ 1332.977789] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.977789] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650319} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.978148] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9287b8eb-487d-4f51-9e7c-90c016a1c8e2/9287b8eb-487d-4f51-9e7c-90c016a1c8e2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.978218] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.981329] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8a58e54-9292-495f-b352-cae92fa07def {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.990846] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695106, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.992157] env[62820]: DEBUG nova.network.neutron [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1332.999082] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1332.999082] env[62820]: value = "task-1695107" [ 1332.999082] env[62820]: _type = "Task" [ 1332.999082] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.014957] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695107, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.028556] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f91dbf-e2df-4c1c-b27d-4dfa40f77c20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.037126] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8f8070-52c3-4699-865d-d47daa79eef5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.075601] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd30eeb5-7524-437a-9d45-49056c2df91c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.085294] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f13148-cbc4-47bc-8dc8-8b04de130f7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.101042] env[62820]: DEBUG nova.compute.provider_tree [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.206424] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.206794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.235275] env[62820]: DEBUG nova.network.neutron [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Updating instance_info_cache with network_info: [{"id": "712e639f-2aff-4915-9285-ea3d67b8e072", "address": "fa:16:3e:b9:3c:65", "network": {"id": "8ef87dfa-367b-4a0c-9d6f-236010ac2b07", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-250075659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c169e476764215ba27ba5b0381e8e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap712e639f-2a", "ovs_interfaceid": "712e639f-2aff-4915-9285-ea3d67b8e072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.466243] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1333.494375] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695106, 'name': ReconfigVM_Task, 'duration_secs': 0.349709} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.496532] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1333.496769] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1333.496907] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1333.497775] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1333.497775] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Image 
pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1333.497775] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1333.497775] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1333.497775] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1333.497964] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1333.498297] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1333.498297] env[62820]: DEBUG nova.virt.hardware [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1333.498527] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Reconfigured VM instance instance-0000000b to attach disk [datastore1] b7c52283-eada-47fd-887f-a5ad94a0583a/b7c52283-eada-47fd-887f-a5ad94a0583a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1333.500924] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d3d466-52ff-4b70-a367-b97cc847f9db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.504168] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7d5cfc1-736f-4276-8dc4-9643bf42f5a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.519201] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf26e78-60a6-4177-a21e-38fd9300abb8 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.523634] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695107, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.19689} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.524781] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1333.525168] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1333.525168] env[62820]: value = "task-1695108" [ 1333.525168] env[62820]: _type = "Task" [ 1333.525168] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.526230] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfeb0989-84b0-4fa3-83cc-af3421d5a370 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.568048] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 9287b8eb-487d-4f51-9e7c-90c016a1c8e2/9287b8eb-487d-4f51-9e7c-90c016a1c8e2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1333.570576] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aca8a82c-cc0f-4c97-9a10-10d7c61219e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.584852] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695108, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.593350] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1333.593350] env[62820]: value = "task-1695109" [ 1333.593350] env[62820]: _type = "Task" [ 1333.593350] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.602414] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695109, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.604446] env[62820]: DEBUG nova.scheduler.client.report [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1333.676871] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.738754] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Releasing lock "refresh_cache-846e8df9-b925-4d2e-a90e-4e774c35d0b4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.738754] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Instance network_info: |[{"id": "712e639f-2aff-4915-9285-ea3d67b8e072", "address": "fa:16:3e:b9:3c:65", "network": {"id": "8ef87dfa-367b-4a0c-9d6f-236010ac2b07", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-250075659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c169e476764215ba27ba5b0381e8e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap712e639f-2a", "ovs_interfaceid": "712e639f-2aff-4915-9285-ea3d67b8e072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1333.738754] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:3c:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f847601f-7479-48eb-842f-41f94eea8537', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '712e639f-2aff-4915-9285-ea3d67b8e072', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1333.748481] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Creating folder: Project (a2c169e476764215ba27ba5b0381e8e7). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1333.748481] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-016ed0db-94db-445c-93fd-5f73c404e03d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.761630] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Created folder: Project (a2c169e476764215ba27ba5b0381e8e7) in parent group-v353379. [ 1333.761630] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Creating folder: Instances. Parent ref: group-v353420. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1333.761807] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ffdf982-782d-43fe-9fec-012e84f7c47f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.773115] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Created folder: Instances in parent group-v353420. [ 1333.773445] env[62820]: DEBUG oslo.service.loopingcall [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1333.773649] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1333.774306] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64a7f239-b261-48c1-a079-12b4128d236d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.796153] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1333.796153] env[62820]: value = "task-1695112" [ 1333.796153] env[62820]: _type = "Task" [ 1333.796153] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.804872] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695112, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.045264] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695108, 'name': Rename_Task, 'duration_secs': 0.172526} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.045588] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1334.045800] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2aeb1ae6-68ba-4f0d-9974-ee236654cf02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.055782] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1334.055782] env[62820]: value = "task-1695113" [ 1334.055782] env[62820]: _type = "Task" [ 1334.055782] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.066831] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695113, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.102810] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695109, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.112937] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.691s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.113480] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1334.116949] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.725s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.117093] env[62820]: DEBUG nova.objects.instance [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1334.165230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.165482] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.165681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.165951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.166089] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.168084] env[62820]: INFO nova.compute.manager [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 
7c5d1740-92ba-4d4b-a557-10f8ea58e883] Terminating instance [ 1334.308215] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695112, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.356040] env[62820]: DEBUG nova.compute.manager [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Received event network-changed-712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1334.356040] env[62820]: DEBUG nova.compute.manager [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Refreshing instance network info cache due to event network-changed-712e639f-2aff-4915-9285-ea3d67b8e072. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1334.356040] env[62820]: DEBUG oslo_concurrency.lockutils [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] Acquiring lock "refresh_cache-846e8df9-b925-4d2e-a90e-4e774c35d0b4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.356040] env[62820]: DEBUG oslo_concurrency.lockutils [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] Acquired lock "refresh_cache-846e8df9-b925-4d2e-a90e-4e774c35d0b4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.356040] env[62820]: DEBUG nova.network.neutron [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Refreshing network info cache for port 712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.430763] env[62820]: DEBUG nova.network.neutron [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Successfully updated port: b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1334.575817] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695113, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.611038] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695109, 'name': ReconfigVM_Task, 'duration_secs': 0.95984} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.611038] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 9287b8eb-487d-4f51-9e7c-90c016a1c8e2/9287b8eb-487d-4f51-9e7c-90c016a1c8e2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.611038] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b67bb1a-7cf4-418c-854b-27fd592aafe1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.623800] env[62820]: DEBUG nova.compute.utils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1334.628773] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1334.628773] env[62820]: value = "task-1695114" [ 1334.628773] env[62820]: _type = "Task" [ 1334.628773] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.628773] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1334.628773] env[62820]: DEBUG nova.network.neutron [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1334.645555] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695114, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.672693] env[62820]: DEBUG nova.compute.manager [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1334.673012] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1334.674255] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7430e3c8-f41e-4213-8b7f-3a7f69185718 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.683911] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.684155] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d84d77c4-cfa9-452f-86a5-789c3e595529 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.693842] env[62820]: DEBUG oslo_vmware.api [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1334.693842] env[62820]: value = "task-1695115" [ 1334.693842] env[62820]: _type = "Task" [ 1334.693842] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.704354] env[62820]: DEBUG oslo_vmware.api [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.716226] env[62820]: DEBUG nova.policy [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f4ef714e4224a4ab88233634c012441', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7f22f2543c747b29127852290bd498c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1334.811807] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695112, 'name': CreateVM_Task, 'duration_secs': 0.740631} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.811807] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1334.811807] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.811807] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.811807] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1334.811807] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c68eb13-41e4-42ee-ad01-d0d564286e24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.815604] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1334.815604] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52847558-34b0-3cfb-b628-bef7e40684dd" [ 1334.815604] env[62820]: _type = "Task" [ 1334.815604] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.826977] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52847558-34b0-3cfb-b628-bef7e40684dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.937729] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.937729] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquired lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.937729] env[62820]: DEBUG nova.network.neutron [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1335.073554] env[62820]: DEBUG oslo_vmware.api [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695113, 'name': PowerOnVM_Task, 'duration_secs': 0.548344} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.073980] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1335.074315] env[62820]: INFO nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Took 21.30 seconds to spawn the instance on the hypervisor. [ 1335.074606] env[62820]: DEBUG nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1335.075486] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efbf150-0ba1-4bd0-9a32-437603100845 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.137390] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1335.142728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-227a743e-9cb0-49f1-83ac-05ebe2e7c804 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.144316] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.556s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.145896] env[62820]: INFO nova.compute.claims [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1335.155940] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695114, 'name': Rename_Task, 'duration_secs': 0.184022} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.156370] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1335.156733] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43daf1c0-2974-41af-b442-fadf9f818893 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.165030] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1335.165030] env[62820]: value = "task-1695116" [ 1335.165030] env[62820]: _type = "Task" [ 1335.165030] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.169665] env[62820]: DEBUG nova.network.neutron [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Successfully created port: 99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1335.178013] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695116, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.207215] env[62820]: DEBUG oslo_vmware.api [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695115, 'name': PowerOffVM_Task, 'duration_secs': 0.196709} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.207215] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1335.207215] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1335.207902] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb0949f1-8d58-4209-9bde-00c80cb93c4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.294689] env[62820]: DEBUG nova.network.neutron [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Updated VIF entry in instance network info cache for port 712e639f-2aff-4915-9285-ea3d67b8e072. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.295468] env[62820]: DEBUG nova.network.neutron [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Updating instance_info_cache with network_info: [{"id": "712e639f-2aff-4915-9285-ea3d67b8e072", "address": "fa:16:3e:b9:3c:65", "network": {"id": "8ef87dfa-367b-4a0c-9d6f-236010ac2b07", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-250075659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c169e476764215ba27ba5b0381e8e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap712e639f-2a", "ovs_interfaceid": "712e639f-2aff-4915-9285-ea3d67b8e072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.302209] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1335.302829] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1335.303211] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Deleting the datastore file [datastore1] 7c5d1740-92ba-4d4b-a557-10f8ea58e883 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1335.303601] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c91a4ab-79b1-41f8-9f8b-cb63c10221e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.313818] env[62820]: DEBUG oslo_vmware.api [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for the task: (returnval){ [ 1335.313818] env[62820]: value = "task-1695118" [ 1335.313818] env[62820]: _type = "Task" [ 1335.313818] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.324702] env[62820]: DEBUG oslo_vmware.api [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.331114] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52847558-34b0-3cfb-b628-bef7e40684dd, 'name': SearchDatastore_Task, 'duration_secs': 0.018994} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.331478] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.331765] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.332061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.332268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.332501] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.333123] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdeaf733-7b35-40ce-8d4d-14a5ed126161 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.351413] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.351591] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1335.352335] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-658065e2-cbe4-458e-b176-0f52f3d75668 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.359043] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1335.359043] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523cc361-7bc0-012b-58df-045bd7c0bab7" [ 1335.359043] env[62820]: _type = "Task" [ 1335.359043] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.368019] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523cc361-7bc0-012b-58df-045bd7c0bab7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.514176] env[62820]: DEBUG nova.network.neutron [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1335.603283] env[62820]: INFO nova.compute.manager [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Took 36.97 seconds to build instance. [ 1335.678872] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695116, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.682018] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.682018] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1335.802511] env[62820]: DEBUG oslo_concurrency.lockutils [req-e86c07ab-b925-4edb-a261-d30b047b43c8 req-45d286b1-ab0e-4dfc-a850-5df83629387f service nova] Releasing lock "refresh_cache-846e8df9-b925-4d2e-a90e-4e774c35d0b4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.823782] env[62820]: DEBUG oslo_vmware.api [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Task: {'id': task-1695118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413291} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.824176] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.824272] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.825025] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.825025] env[62820]: INFO nova.compute.manager [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1335.825025] env[62820]: DEBUG oslo.service.loopingcall [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.825025] env[62820]: DEBUG nova.compute.manager [-] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1335.825247] env[62820]: DEBUG nova.network.neutron [-] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1335.874548] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523cc361-7bc0-012b-58df-045bd7c0bab7, 'name': SearchDatastore_Task, 'duration_secs': 0.014383} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.875434] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ba57826-c0da-4f56-b9c4-ffab04374954 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.881599] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1335.881599] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d7a8ac-0067-e788-928b-85ac35174bc1" [ 1335.881599] env[62820]: _type = "Task" [ 1335.881599] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.890881] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d7a8ac-0067-e788-928b-85ac35174bc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.032346] env[62820]: DEBUG nova.network.neutron [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Updating instance_info_cache with network_info: [{"id": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "address": "fa:16:3e:37:fa:79", "network": {"id": "ae850709-e683-4f0e-9f97-eb3697ed3770", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1182603103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec28e0df25f4d18b8d707ba9849e098", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9fb1075-80", "ovs_interfaceid": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.105325] env[62820]: DEBUG oslo_concurrency.lockutils [None req-61e4b90d-4bcc-445b-aadc-cef096d74484 tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.182s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.154024] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 
tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1336.179404] env[62820]: DEBUG oslo_vmware.api [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695116, 'name': PowerOnVM_Task, 'duration_secs': 0.999717} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.179404] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1336.179404] env[62820]: INFO nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Took 10.28 seconds to spawn the instance on the hypervisor. [ 1336.179404] env[62820]: DEBUG nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1336.180398] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4f1c47-5f4b-4c11-ba85-365e0829e6ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.205491] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1336.205725] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1336.205958] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 
tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1336.206190] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1336.207022] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1336.207022] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1336.207743] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1336.207992] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1336.208246] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1336.209587] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1336.209587] env[62820]: DEBUG nova.virt.hardware [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1336.209587] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a38c59-c882-4989-9375-785bb8b0b1e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.224805] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53772448-aa48-4e16-a1f7-c60736a4ab9d {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.393601] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d7a8ac-0067-e788-928b-85ac35174bc1, 'name': SearchDatastore_Task, 'duration_secs': 0.014211} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.393931] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.394251] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 846e8df9-b925-4d2e-a90e-4e774c35d0b4/846e8df9-b925-4d2e-a90e-4e774c35d0b4.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1336.394640] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4fa04f8c-ff4f-429e-b6da-4baca70851b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.409040] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1336.409040] env[62820]: value = "task-1695119" [ 1336.409040] env[62820]: _type = "Task" [ 1336.409040] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.424716] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695119, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.536018] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Releasing lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.537912] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Instance network_info: |[{"id": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "address": "fa:16:3e:37:fa:79", "network": {"id": "ae850709-e683-4f0e-9f97-eb3697ed3770", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1182603103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec28e0df25f4d18b8d707ba9849e098", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9fb1075-80", "ovs_interfaceid": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1336.540822] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:fa:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '17c839f5-4de0-449c-9a24-4e0e2fca37ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9fb1075-80d0-4a63-a82b-80d3eedd8fe5', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1336.550052] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Creating folder: Project (fec28e0df25f4d18b8d707ba9849e098). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1336.550774] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2e6fa33-df01-4a1e-b9f8-63532e598abb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.570494] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Created folder: Project (fec28e0df25f4d18b8d707ba9849e098) in parent group-v353379. [ 1336.570494] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Creating folder: Instances. Parent ref: group-v353423. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1336.571871] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0f09e91-e18a-4454-9606-8b4019d6ce19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.587493] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Created folder: Instances in parent group-v353423. [ 1336.587852] env[62820]: DEBUG oslo.service.loopingcall [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1336.591532] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1336.592122] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d34378a3-77fe-4cee-ad85-3ee91b727eec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.614820] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1336.624822] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1336.624822] env[62820]: value = "task-1695122" [ 1336.624822] env[62820]: _type = "Task" [ 1336.624822] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.636558] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695122, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.710663] env[62820]: INFO nova.compute.manager [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Took 33.27 seconds to build instance. 
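The records around this point (Folder.CreateVM_Task, CopyVirtualDisk_Task, and the repeated "Waiting for the task" / _poll_task entries) all follow the same asynchronous task pattern: an operation is invoked against vCenter, a task reference comes back, and the session polls it until completion. The following is a minimal, illustrative sketch of that pattern using the public oslo.vmware API only; the vCenter host, credentials, and datastore paths are placeholders, not values taken from this log, and the real Nova code paths (nova.virt.vmwareapi.vm_util) wrap this with additional error handling.

```python
# Sketch of the invoke-then-poll pattern behind the wait_for_task/_poll_task
# DEBUG records above. Host, credentials and paths are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Invoking an asynchronous vSphere method returns a task managed object
# reference; wait_for_task() polls it (the "progress is N%" records)
# until it reaches the 'success' state or raises on error.
content = session.vim.service_content
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    content.virtualDiskManager,
    sourceName='[datastore1] devstack-image-cache_base/example.vmdk',
    destName='[datastore1] example-instance/example-instance.vmdk')
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the copy completes
```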
[ 1336.744623] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54e2e1f-d4e9-44e6-9ab1-80a99f810a61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.753740] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00adc42-d4fd-43d0-8891-dad8b32f644e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.789443] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4f63b3-3bd8-41db-90c8-b551e05dcaf0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.799544] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a425792f-92cf-41f3-b610-a2633b2d86de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.816188] env[62820]: DEBUG nova.compute.provider_tree [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.856291] env[62820]: DEBUG nova.compute.manager [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Received event network-vif-plugged-b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1336.856763] env[62820]: DEBUG oslo_concurrency.lockutils [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] Acquiring lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.856845] env[62820]: DEBUG oslo_concurrency.lockutils [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.856999] env[62820]: DEBUG oslo_concurrency.lockutils [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.857233] env[62820]: DEBUG nova.compute.manager [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] No waiting events found dispatching network-vif-plugged-b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1336.857437] env[62820]: WARNING nova.compute.manager [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d 
req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Received unexpected event network-vif-plugged-b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 for instance with vm_state building and task_state spawning. [ 1336.857603] env[62820]: DEBUG nova.compute.manager [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Received event network-changed-b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1336.857764] env[62820]: DEBUG nova.compute.manager [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Refreshing instance network info cache due to event network-changed-b9fb1075-80d0-4a63-a82b-80d3eedd8fe5. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1336.858028] env[62820]: DEBUG oslo_concurrency.lockutils [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] Acquiring lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.858268] env[62820]: DEBUG oslo_concurrency.lockutils [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] Acquired lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.858385] env[62820]: DEBUG nova.network.neutron [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Refreshing network info cache for port b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1336.879949] env[62820]: DEBUG nova.compute.manager [req-f2881d3f-9933-495a-9f24-f0c94cd31f85 req-776582cd-03f3-4f08-b210-c7411c6007e6 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Received event network-vif-deleted-05fc8ccf-bb2b-4348-898d-795b93e333a7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1336.881012] env[62820]: INFO nova.compute.manager [req-f2881d3f-9933-495a-9f24-f0c94cd31f85 req-776582cd-03f3-4f08-b210-c7411c6007e6 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Neutron deleted interface 05fc8ccf-bb2b-4348-898d-795b93e333a7; detaching it from the instance and deleting it from the info cache [ 1336.881012] env[62820]: DEBUG nova.network.neutron [req-f2881d3f-9933-495a-9f24-f0c94cd31f85 req-776582cd-03f3-4f08-b210-c7411c6007e6 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.927547] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695119, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.933033] env[62820]: DEBUG nova.network.neutron [-] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.139180] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695122, 'name': CreateVM_Task, 'duration_secs': 0.433839} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.139395] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1337.140378] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.141072] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.143180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1337.143180] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-792230e5-4646-426f-86e4-8e6728f98666 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.146093] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.151314] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1337.151314] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52edb6a2-e2b3-5718-098d-aa2e2fe55770" [ 1337.151314] env[62820]: _type = "Task" [ 1337.151314] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.162656] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52edb6a2-e2b3-5718-098d-aa2e2fe55770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.214052] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3069a026-9d6b-4564-82b4-03ab23ba1809 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.864s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.236928] env[62820]: DEBUG nova.network.neutron [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Successfully updated port: 99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1337.319672] env[62820]: DEBUG nova.scheduler.client.report [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1337.385464] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ef81549-c94e-412c-b8f8-bda13cd74457 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.400022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d92758e-7f8a-4aed-90ac-100cf4002ffa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.422206] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695119, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703333} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.424019] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 846e8df9-b925-4d2e-a90e-4e774c35d0b4/846e8df9-b925-4d2e-a90e-4e774c35d0b4.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1337.424019] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1337.424019] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60e3f8cc-61ff-4c4f-9c8e-493d6e1ca17a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.435757] env[62820]: DEBUG nova.compute.manager [req-f2881d3f-9933-495a-9f24-f0c94cd31f85 req-776582cd-03f3-4f08-b210-c7411c6007e6 service nova] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Detach interface failed, port_id=05fc8ccf-bb2b-4348-898d-795b93e333a7, reason: Instance 7c5d1740-92ba-4d4b-a557-10f8ea58e883 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1337.438119] env[62820]: INFO nova.compute.manager [-] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Took 1.61 seconds to deallocate network for instance. [ 1337.445127] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1337.445127] env[62820]: value = "task-1695123" [ 1337.445127] env[62820]: _type = "Task" [ 1337.445127] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.457915] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695123, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.664937] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52edb6a2-e2b3-5718-098d-aa2e2fe55770, 'name': SearchDatastore_Task, 'duration_secs': 0.01705} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.665422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.665722] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.666071] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.666362] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.666579] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.667243] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff01f3a5-0970-41a4-bf0d-60f031fea511 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.680351] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.680351] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.680691] env[62820]: DEBUG nova.network.neutron [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Updated VIF entry in instance network info cache for port b9fb1075-80d0-4a63-a82b-80d3eedd8fe5. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1337.681304] env[62820]: DEBUG nova.network.neutron [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Updating instance_info_cache with network_info: [{"id": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "address": "fa:16:3e:37:fa:79", "network": {"id": "ae850709-e683-4f0e-9f97-eb3697ed3770", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1182603103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec28e0df25f4d18b8d707ba9849e098", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9fb1075-80", "ovs_interfaceid": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.685467] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d6e78e6-ecf6-4da6-9512-f2cad0ab99dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.689332] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.690034] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.694725] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1337.694725] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c88de6-4294-d55b-5963-fd19c574d968" [ 1337.694725] env[62820]: _type = "Task" [ 1337.694725] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.708284] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c88de6-4294-d55b-5963-fd19c574d968, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.719331] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1337.741028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.741028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.741420] env[62820]: DEBUG nova.network.neutron [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1337.773637] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "b7c52283-eada-47fd-887f-a5ad94a0583a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.774587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.774816] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.775228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.775536] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.778457] env[62820]: INFO nova.compute.manager [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Terminating instance [ 1337.824956] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.825530] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1337.829147] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.570s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.830831] env[62820]: INFO nova.compute.claims [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.947118] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.957674] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695123, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081485} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.957946] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1337.958752] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091d7af3-9fd1-4d1a-80f8-9f7b05f7f1ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.982058] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 846e8df9-b925-4d2e-a90e-4e774c35d0b4/846e8df9-b925-4d2e-a90e-4e774c35d0b4.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1337.982387] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7a31d01-8a2c-4b69-8708-7e7337e54171 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.004968] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1338.004968] env[62820]: value = "task-1695124" [ 1338.004968] env[62820]: _type = "Task" [ 1338.004968] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.014135] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.191304] env[62820]: DEBUG oslo_concurrency.lockutils [req-410a149e-8ac1-45ea-999c-8d7b93c8ad7d req-bdf61ab4-857e-421b-9ce6-95d2358509a4 service nova] Releasing lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.209336] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c88de6-4294-d55b-5963-fd19c574d968, 'name': SearchDatastore_Task, 'duration_secs': 0.021322} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.210616] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e50653c-6819-49cb-9b6d-66654d24cf80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.219597] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1338.219597] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521e0eb6-cce8-6ca2-b341-e088061c0770" [ 1338.219597] env[62820]: _type = "Task" [ 1338.219597] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.236338] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521e0eb6-cce8-6ca2-b341-e088061c0770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.248367] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.277652] env[62820]: DEBUG nova.network.neutron [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1338.282410] env[62820]: DEBUG nova.compute.manager [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1338.282622] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1338.283491] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fe15ee-ad5b-4709-9d9e-95af712d8f13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.292312] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.292574] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7822185-4712-4364-bd24-689ef9026fcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.301860] env[62820]: DEBUG oslo_vmware.api [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1338.301860] env[62820]: value = "task-1695125" [ 1338.301860] env[62820]: _type = "Task" [ 1338.301860] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.316895] env[62820]: DEBUG oslo_vmware.api [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.335730] env[62820]: DEBUG nova.compute.utils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1338.341431] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1338.341431] env[62820]: DEBUG nova.network.neutron [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.447019] env[62820]: DEBUG nova.policy [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26edc59f3c4d49febc3fb1f662337cc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd67af71aa7c74b1db99cbf3d7c0bfdb6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1338.485362] env[62820]: DEBUG nova.network.neutron [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.521870] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695124, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.680442] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1338.683222] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1338.683222] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1338.732902] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521e0eb6-cce8-6ca2-b341-e088061c0770, 'name': SearchDatastore_Task, 'duration_secs': 0.026663} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.733654] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.734441] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] f2658dfa-baed-4ff3-8c7e-733bbcf1916e/f2658dfa-baed-4ff3-8c7e-733bbcf1916e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.734441] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a5f8d23-168d-4874-b15f-ba8dc9c745f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.742655] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1338.742655] env[62820]: value = "task-1695126" [ 1338.742655] env[62820]: _type = "Task" [ 1338.742655] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.752106] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695126, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.813220] env[62820]: DEBUG oslo_vmware.api [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695125, 'name': PowerOffVM_Task, 'duration_secs': 0.271276} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.813549] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1338.813776] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1338.814079] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a41375a-bfaf-4a21-98ad-638d28a40f7c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.847391] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1338.956038] env[62820]: DEBUG nova.network.neutron [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Successfully created port: 06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.987590] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.987945] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Instance network_info: |[{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1338.988419] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:64:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '456bd8a2-0fb6-4b17-9d25-08e7995c5184', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99ce12db-7b90-44f1-8086-9f95246773fe', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1339.000055] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Creating folder: Project (d7f22f2543c747b29127852290bd498c). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.003194] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85fa5cd8-0e78-43d4-b609-e77a1e8a362e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.019527] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695124, 'name': ReconfigVM_Task, 'duration_secs': 0.708139} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.019527] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 846e8df9-b925-4d2e-a90e-4e774c35d0b4/846e8df9-b925-4d2e-a90e-4e774c35d0b4.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1339.020173] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Created folder: Project (d7f22f2543c747b29127852290bd498c) in parent group-v353379. [ 1339.020350] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Creating folder: Instances. Parent ref: group-v353426. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.023177] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e777ff6c-2294-46b0-9db8-4c9562a0f36c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.025710] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77972910-c0ea-4aaf-b175-f1a303d10142 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.034913] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1339.034913] env[62820]: value = "task-1695129" [ 1339.034913] env[62820]: _type = "Task" [ 1339.034913] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.039976] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Created folder: Instances in parent group-v353426. [ 1339.040242] env[62820]: DEBUG oslo.service.loopingcall [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1339.042325] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1339.043655] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e204c8e4-18b7-40c9-a6dd-a7955225bcdc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.063622] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695129, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.073143] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1339.073143] env[62820]: value = "task-1695131" [ 1339.073143] env[62820]: _type = "Task" [ 1339.073143] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.089981] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695131, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.190294] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Skipping network cache update for instance because it is being deleted. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10303}} [ 1339.190294] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1339.190294] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1339.190555] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1339.190555] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1339.190555] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1339.190756] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.190870] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.191306] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1339.191997] env[62820]: DEBUG nova.objects.instance [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lazy-loading 'info_cache' on Instance uuid 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1339.234330] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1339.234330] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1339.234330] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Deleting the datastore file [datastore1] b7c52283-eada-47fd-887f-a5ad94a0583a {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.234330] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d91e4b0-39db-4009-84ac-97e8f65414d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.249008] env[62820]: DEBUG oslo_vmware.api [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1339.249008] env[62820]: value = "task-1695132" [ 1339.249008] env[62820]: _type = "Task" [ 1339.249008] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.260298] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695126, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.271647] env[62820]: DEBUG oslo_vmware.api [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.370373] env[62820]: DEBUG nova.compute.manager [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1339.371788] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bfdf40-bdf0-4b6f-a892-ba11c6619c4b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.505665] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccae392d-c48a-467f-8b91-8494e5b31175 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.514467] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f75419-94f6-4216-a8f5-97e07d416e70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.551286] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08facbb3-4891-42f8-bfd5-248a0538a84e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.561150] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cf9be3-449f-4531-95c2-1ad48a528dc5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.565149] 
env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695129, 'name': Rename_Task, 'duration_secs': 0.337675} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.567135] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1339.570714] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ea9d518-7fef-40c9-a32e-507dc76ed5e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.572465] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.572724] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.573781] env[62820]: DEBUG nova.compute.manager [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received event network-vif-plugged-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1339.573973] env[62820]: DEBUG oslo_concurrency.lockutils [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] Acquiring lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.574178] env[62820]: DEBUG oslo_concurrency.lockutils [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.574336] env[62820]: DEBUG oslo_concurrency.lockutils [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.574502] env[62820]: DEBUG nova.compute.manager [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] No waiting events found dispatching network-vif-plugged-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1339.574663] env[62820]: WARNING nova.compute.manager [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received unexpected event network-vif-plugged-99ce12db-7b90-44f1-8086-9f95246773fe for instance with vm_state building and task_state spawning. [ 1339.574815] env[62820]: DEBUG nova.compute.manager [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1339.574991] env[62820]: DEBUG nova.compute.manager [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing instance network info cache due to event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1339.575205] env[62820]: DEBUG oslo_concurrency.lockutils [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.575320] env[62820]: DEBUG oslo_concurrency.lockutils [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.575470] env[62820]: DEBUG nova.network.neutron [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1339.585207] env[62820]: DEBUG nova.compute.provider_tree [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.595297] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1339.595297] env[62820]: value = "task-1695133" [ 1339.595297] env[62820]: _type = "Task" [ 1339.595297] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.599164] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695131, 'name': CreateVM_Task, 'duration_secs': 0.524614} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.605080] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1339.605080] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.605080] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.605080] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1339.605256] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aaca33c-9ab5-46fb-9fe1-046b363e2f60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.613542] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.615106] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1339.615106] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ece759-8460-f875-6477-ab07976fc38f" [ 1339.615106] env[62820]: _type = "Task" [ 1339.615106] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.626782] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ece759-8460-f875-6477-ab07976fc38f, 'name': SearchDatastore_Task, 'duration_secs': 0.009736} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.627541] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.627637] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1339.627914] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.628122] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.628437] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1339.628611] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edc20509-0ae4-49d9-8520-46b6fc51f08b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.641625] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1339.641900] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1339.642749] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a1d64aa-1066-4b49-bafb-cb745d84c3ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.653017] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1339.653017] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a3bbba-af15-abf4-5a11-1032495a7bb4" [ 1339.653017] env[62820]: _type = "Task" [ 1339.653017] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.667929] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a3bbba-af15-abf4-5a11-1032495a7bb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.753460] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531571} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.756689] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] f2658dfa-baed-4ff3-8c7e-733bbcf1916e/f2658dfa-baed-4ff3-8c7e-733bbcf1916e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.756910] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.757167] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecb37b19-913e-490c-a6af-adc57a1d18f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.764520] env[62820]: DEBUG oslo_vmware.api [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240025} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.765767] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1339.765976] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1339.766182] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1339.766354] env[62820]: INFO nova.compute.manager [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Took 1.48 seconds to destroy the instance on the hypervisor. [ 1339.766590] env[62820]: DEBUG oslo.service.loopingcall [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1339.766866] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1339.766866] env[62820]: value = "task-1695134" [ 1339.766866] env[62820]: _type = "Task" [ 1339.766866] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.767062] env[62820]: DEBUG nova.compute.manager [-] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1339.767160] env[62820]: DEBUG nova.network.neutron [-] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1339.777575] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.874022] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1339.897904] env[62820]: INFO nova.compute.manager [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] instance snapshotting [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 
tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1339.904134] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1339.905041] env[62820]: DEBUG nova.virt.hardware [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1339.906083] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216b6442-6f93-4482-a2be-0b68a287bf94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.911953] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a11c3f8-6baa-4450-96a1-5784046826d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.936863] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494fcc1a-0bc7-4666-ad54-241c381e1027 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.940868] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6aed99-3d8d-4088-9ba5-5618313d1c38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.097177] env[62820]: DEBUG nova.scheduler.client.report [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1340.130712] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695133, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.173202] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a3bbba-af15-abf4-5a11-1032495a7bb4, 'name': SearchDatastore_Task, 'duration_secs': 0.016188} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.174044] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeb3cc38-c598-4efd-9525-bb65b18dd447 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.185255] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1340.185255] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523ba131-e98e-c6f2-a3ea-331c314bcc5a" [ 1340.185255] env[62820]: _type = "Task" [ 1340.185255] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.207416] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523ba131-e98e-c6f2-a3ea-331c314bcc5a, 'name': SearchDatastore_Task, 'duration_secs': 0.010868} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.207868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.208777] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65/7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1340.208869] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cda4238d-4d73-493c-bbc5-b6775ce42748 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.222416] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1340.222416] env[62820]: value = "task-1695135" [ 1340.222416] env[62820]: _type = "Task" [ 1340.222416] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.240158] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.281387] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.240262} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.281523] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1340.282286] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5da80be-3e54-4c2c-9387-abf9e56bf231 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.306626] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] f2658dfa-baed-4ff3-8c7e-733bbcf1916e/f2658dfa-baed-4ff3-8c7e-733bbcf1916e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.310413] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-754ddb4a-4500-4f41-8108-60a5b9eff92e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.334202] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1340.334202] env[62820]: value = "task-1695136" [ 1340.334202] env[62820]: _type = "Task" [ 1340.334202] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.347919] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695136, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.462851] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1340.463215] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6fb76a1e-df31-4fe4-bb94-8e586173cad0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.471333] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1340.471333] env[62820]: value = "task-1695137" [ 1340.471333] env[62820]: _type = "Task" [ 1340.471333] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.483761] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695137, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.501767] env[62820]: DEBUG nova.network.neutron [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updated VIF entry in instance network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1340.501881] env[62820]: DEBUG nova.network.neutron [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.610242] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.781s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.610809] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1340.614242] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.048s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.626540] env[62820]: DEBUG oslo_vmware.api [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695133, 'name': PowerOnVM_Task, 'duration_secs': 0.836004} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.626540] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.626540] env[62820]: INFO nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Took 9.89 seconds to spawn the instance on the hypervisor. [ 1340.626540] env[62820]: DEBUG nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1340.626540] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e83786-acd2-4620-9279-360098030e65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.742097] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695135, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.792789] env[62820]: DEBUG nova.compute.manager [req-450a85d1-178b-44c7-b428-5825a6d70afd req-166ab325-ce6d-41fb-89d7-0055658f23e5 service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Received event network-vif-plugged-06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1340.793117] env[62820]: DEBUG oslo_concurrency.lockutils [req-450a85d1-178b-44c7-b428-5825a6d70afd req-166ab325-ce6d-41fb-89d7-0055658f23e5 service nova] Acquiring lock "42d00bd3-71fa-4c26-a544-489326163d88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.793435] env[62820]: DEBUG oslo_concurrency.lockutils [req-450a85d1-178b-44c7-b428-5825a6d70afd req-166ab325-ce6d-41fb-89d7-0055658f23e5 service nova] Lock "42d00bd3-71fa-4c26-a544-489326163d88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.793635] env[62820]: DEBUG oslo_concurrency.lockutils [req-450a85d1-178b-44c7-b428-5825a6d70afd req-166ab325-ce6d-41fb-89d7-0055658f23e5 service nova] Lock "42d00bd3-71fa-4c26-a544-489326163d88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.793830] env[62820]: DEBUG nova.compute.manager [req-450a85d1-178b-44c7-b428-5825a6d70afd req-166ab325-ce6d-41fb-89d7-0055658f23e5 service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] No waiting events found dispatching network-vif-plugged-06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1340.794045] env[62820]: WARNING nova.compute.manager [req-450a85d1-178b-44c7-b428-5825a6d70afd req-166ab325-ce6d-41fb-89d7-0055658f23e5 service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Received unexpected event network-vif-plugged-06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 for instance with vm_state building and task_state spawning. [ 1340.847360] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695136, 'name': ReconfigVM_Task, 'duration_secs': 0.378877} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.847360] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Reconfigured VM instance instance-0000000f to attach disk [datastore1] f2658dfa-baed-4ff3-8c7e-733bbcf1916e/f2658dfa-baed-4ff3-8c7e-733bbcf1916e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1340.848792] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e97e626f-e495-44a8-b32d-8c626b2d7a98 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.859177] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1340.859177] env[62820]: value = "task-1695138" [ 1340.859177] env[62820]: _type = "Task" [ 1340.859177] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.874341] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695138, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.897515] env[62820]: DEBUG nova.network.neutron [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Successfully updated port: 06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1340.987673] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695137, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.006102] env[62820]: DEBUG oslo_concurrency.lockutils [req-2485990f-9d41-44d1-94ae-365c263eb1f8 req-1f0cfc9d-cf30-4f03-a080-957342393943 service nova] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.070282] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updating instance_info_cache with network_info: [{"id": "337d3329-4826-4d1a-a659-b6ce135f8b94", "address": "fa:16:3e:17:56:bb", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d3329-48", "ovs_interfaceid": "337d3329-4826-4d1a-a659-b6ce135f8b94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.120856] env[62820]: DEBUG nova.compute.utils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1341.122573] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1341.122749] env[62820]: DEBUG nova.network.neutron [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1341.151147] env[62820]: INFO nova.compute.manager [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Took 33.98 seconds to build instance. 
[ 1341.163897] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244cb3d2-b354-45c9-be6f-a4842a7a2a54 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.175166] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0f3d8a-1b1c-4d64-a56e-6c64a3e99dff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.209836] env[62820]: DEBUG nova.policy [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6f2dd7e4b5a4484a9aef4d51061c7d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8561ded662f04b3eb420b60ca3345771', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1341.212124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70219397-4e89-472b-a1f9-cfe11a205eb4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.219813] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3555fd7d-6b26-466c-a446-494bdbffe553 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.235941] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.245372] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534215} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.247963] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65/7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1341.248225] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1341.248481] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59d45e7b-03f4-435a-b7c5-db2856029058 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.256595] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1341.256595] env[62820]: value = "task-1695139" [ 1341.256595] env[62820]: _type = "Task" [ 1341.256595] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.265274] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.296622] env[62820]: DEBUG nova.network.neutron [-] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.371604] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695138, 'name': Rename_Task, 'duration_secs': 0.141111} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.372024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.372306] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe4160ad-25c6-49b9-b8c5-cfcfa7d27138 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.378665] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1341.378665] env[62820]: value = "task-1695140" [ 1341.378665] env[62820]: _type = "Task" [ 1341.378665] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.388922] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.406521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "refresh_cache-42d00bd3-71fa-4c26-a544-489326163d88" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.406521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquired lock "refresh_cache-42d00bd3-71fa-4c26-a544-489326163d88" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.406521] env[62820]: DEBUG nova.network.neutron [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1341.482345] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695137, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.578654] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-4fa6e38f-dcca-4f65-86d6-1c585deb1c13" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.578895] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1341.579141] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.579413] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.625906] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1341.655297] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a5408b3-4fa4-4012-be5a-c055460bf72e tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.459s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.726736] env[62820]: DEBUG nova.compute.manager [req-b77211c0-326c-47a3-b228-05de7f04ad6f req-59a7b698-fd84-4b89-ae1b-485412254e3e service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-vif-deleted-9002583e-7d52-45aa-bb62-0eef82acb545 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1341.726736] env[62820]: DEBUG nova.compute.manager [req-b77211c0-326c-47a3-b228-05de7f04ad6f req-59a7b698-fd84-4b89-ae1b-485412254e3e service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-vif-deleted-7da55fa7-efc1-42e6-a489-fad614ea19e4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1341.727675] env[62820]: DEBUG nova.compute.manager [req-b77211c0-326c-47a3-b228-05de7f04ad6f req-59a7b698-fd84-4b89-ae1b-485412254e3e service nova] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Received event network-vif-deleted-338a4d2a-de27-4e93-bf11-7c91765295a4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1341.741750] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1341.765050] env[62820]: DEBUG nova.network.neutron [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Successfully created port: b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1341.770182] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248186} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.770851] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1341.771841] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de4134c-d230-477e-b503-7d69a477fd24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.796702] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65/7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1341.797550] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4417c68b-2978-4d8b-b9f1-0178f72e7d35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.813263] env[62820]: INFO nova.compute.manager [-] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Took 2.05 seconds to deallocate network for instance. [ 1341.820768] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1341.820768] env[62820]: value = "task-1695141" [ 1341.820768] env[62820]: _type = "Task" [ 1341.820768] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.832113] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695141, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.890591] env[62820]: DEBUG oslo_vmware.api [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695140, 'name': PowerOnVM_Task, 'duration_secs': 0.487249} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.890938] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1341.891233] env[62820]: INFO nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Took 8.42 seconds to spawn the instance on the hypervisor. [ 1341.891459] env[62820]: DEBUG nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1341.892463] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bdf1e8-1ce8-4526-a978-2b177b5b1606 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.978762] env[62820]: DEBUG nova.network.neutron [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1341.986938] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695137, 'name': CreateSnapshot_Task, 'duration_secs': 1.51192} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.988262] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1341.991176] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441a1239-2f48-45db-b787-03e61bf71f57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.086829] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.156252] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1342.247626] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.259740] env[62820]: DEBUG nova.network.neutron [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Updating instance_info_cache with network_info: [{"id": "06e08e58-4a6e-47ab-a9f3-e152d75f1cd0", "address": "fa:16:3e:63:be:f8", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e08e58-4a", "ovs_interfaceid": "06e08e58-4a6e-47ab-a9f3-e152d75f1cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.259740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 
tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.018s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.260176] env[62820]: INFO nova.compute.claims [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.322405] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.331794] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695141, 'name': ReconfigVM_Task, 'duration_secs': 0.276947} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.332083] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65/7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1342.332720] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6dbff103-f89b-4eaa-8ff9-a9527e173cbb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.339242] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1342.339242] env[62820]: value = "task-1695142" [ 1342.339242] env[62820]: _type = "Task" [ 1342.339242] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.346804] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695142, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.414957] env[62820]: INFO nova.compute.manager [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Took 28.48 seconds to build instance. 
[ 1342.511377] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1342.511611] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0f8559b1-22e9-42f0-8ef6-4b87bc7d01f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.520191] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1342.520191] env[62820]: value = "task-1695143" [ 1342.520191] env[62820]: _type = "Task" [ 1342.520191] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.529825] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695143, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.638126] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1342.659321] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1342.659630] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1342.659791] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.659977] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1342.660176] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.660331] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1342.660542] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1342.660692] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1342.660858] env[62820]: DEBUG 
nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1342.661030] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1342.661209] env[62820]: DEBUG nova.virt.hardware [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1342.664198] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66855e51-4a89-4ba7-9a4a-7b4e4c0d089e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.675320] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8828ec9-8e10-4eb4-a4f3-311dd9b793cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.690586] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.768129] env[62820]: ERROR nova.compute.manager [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Error trying to reschedule: nova.exception_Remote.NoValidHost_Remote: No valid host was found. 
No valid host found for resize [ 1342.768129] env[62820]: Traceback (most recent call last): [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/manager.py", line 387, in _cold_migrate [ 1342.768129] env[62820]: task.execute() [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/tasks/base.py", line 25, in wrap [ 1342.768129] env[62820]: with excutils.save_and_reraise_exception(): [ 1342.768129] env[62820]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1342.768129] env[62820]: self.force_reraise() [ 1342.768129] env[62820]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1342.768129] env[62820]: raise self.value [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/tasks/base.py", line 23, in wrap [ 1342.768129] env[62820]: return original(self) [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/tasks/base.py", line 40, in execute [ 1342.768129] env[62820]: return self._execute() [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/tasks/migrate.py", line 310, in _execute [ 1342.768129] env[62820]: selection = self._reschedule() [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/tasks/migrate.py", line 392, in _reschedule [ 1342.768129] env[62820]: raise exception.MaxRetriesExceeded(reason=reason) [ 1342.768129] env[62820]: nova.exception.MaxRetriesExceeded: Exceeded maximum number of retries. Exhausted all hosts available for retrying build failures for instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13. [ 1342.768129] env[62820]: During handling of the above exception, another exception occurred: [ 1342.768129] env[62820]: Traceback (most recent call last): [ 1342.768129] env[62820]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 262, in inner [ 1342.768129] env[62820]: return func(*args, **kwargs) [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/manager.py", line 102, in wrapper [ 1342.768129] env[62820]: return fn(self, context, *args, **kwargs) [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1342.768129] env[62820]: return function(self, context, *args, **kwargs) [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/manager.py", line 333, in migrate_server [ 1342.768129] env[62820]: self._cold_migrate(context, instance, flavor, [ 1342.768129] env[62820]: File "/opt/stack/nova/nova/conductor/manager.py", line 402, in _cold_migrate [ 1342.768129] env[62820]: raise exception.NoValidHost(reason=msg) [ 1342.768129] env[62820]: nova.exception.NoValidHost: No valid host was found. 
No valid host found for resize [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Traceback (most recent call last): [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 6092, in prep_resize [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._prep_resize(context, image, instance, [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 6005, in _prep_resize [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] with self.rt.resize_claim( [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return f(*args, **kwargs) [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 238, in resize_claim [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return self._move_claim( [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 378, in _move_claim [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._update(elevated, cn) [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._update_to_placement(context, compute_node, startup) [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise attempt.get() [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 719, in reraise [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise value [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1342.768129] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.reportclient.update_from_provider_tree( [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1499, in update_from_provider_tree [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.set_inventory_for_provider( [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1004, in set_inventory_for_provider [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise exception.ResourceProviderUpdateConflict( [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (generation 30): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78c20d76-69d1-45f6-9c50-350cdc37a452"}]} [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] During handling of the above exception, another exception occurred: [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Traceback (most recent call last): [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 6147, in _reschedule_resize_or_reraise [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.compute_task_api.resize_instance( [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/api.py", line 94, in resize_instance [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.conductor_compute_rpcapi.migrate_server( [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/rpcapi.py", line 352, in migrate_server [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return cctxt.call(context, 'migrate_server', **kw) [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 1342.769574] 
env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] result = self.transport._send( [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return self._driver.send(target, ctxt, message, [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 800, in send [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return self._send(target, ctxt, message, wait_for_reply, timeout, [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 792, in _send [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise result [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] nova.exception_Remote.NoValidHost_Remote: No valid host was found. No valid host found for resize [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Traceback (most recent call last): [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/manager.py", line 387, in _cold_migrate [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] task.execute() [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/tasks/base.py", line 25, in wrap [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] with excutils.save_and_reraise_exception(): [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.force_reraise() [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise self.value [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.769574] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/tasks/base.py", line 23, in wrap [ 1342.770515] env[62820]: ERROR 
nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return original(self) [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/tasks/base.py", line 40, in execute [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return self._execute() [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/tasks/migrate.py", line 310, in _execute [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] selection = self._reschedule() [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/tasks/migrate.py", line 392, in _reschedule [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise exception.MaxRetriesExceeded(reason=reason) [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] nova.exception.MaxRetriesExceeded: Exceeded maximum number of retries. Exhausted all hosts available for retrying build failures for instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13. [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] During handling of the above exception, another exception occurred: [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Traceback (most recent call last): [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 262, in inner [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return func(*args, **kwargs) [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/manager.py", line 102, in wrapper [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return fn(self, context, *args, **kwargs) [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File 
"/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return function(self, context, *args, **kwargs) [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/manager.py", line 333, in migrate_server [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._cold_migrate(context, instance, flavor, [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/conductor/manager.py", line 402, in _cold_migrate [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise exception.NoValidHost(reason=msg) [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] nova.exception.NoValidHost: No valid host was found. No valid host found for resize [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1342.770515] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Releasing lock "refresh_cache-42d00bd3-71fa-4c26-a544-489326163d88" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.771428] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Instance network_info: |[{"id": "06e08e58-4a6e-47ab-a9f3-e152d75f1cd0", "address": "fa:16:3e:63:be:f8", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e08e58-4a", "ovs_interfaceid": "06e08e58-4a6e-47ab-a9f3-e152d75f1cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1342.772599] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None 
req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:be:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06e08e58-4a6e-47ab-a9f3-e152d75f1cd0', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1342.780241] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Creating folder: Project (d67af71aa7c74b1db99cbf3d7c0bfdb6). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1342.781294] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5419129-ec7d-4046-afdd-4a98316077ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.792054] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Created folder: Project (d67af71aa7c74b1db99cbf3d7c0bfdb6) in parent group-v353379. [ 1342.792253] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Creating folder: Instances. Parent ref: group-v353431. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1342.792482] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0284b1ed-eaae-4560-bec0-3a8ca18e9e43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.801719] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Created folder: Instances in parent group-v353431. [ 1342.801946] env[62820]: DEBUG oslo.service.loopingcall [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1342.802143] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1342.802838] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-febcc850-3583-4256-90e3-06072c8a0838 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.822504] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1342.822504] env[62820]: value = "task-1695146" [ 1342.822504] env[62820]: _type = "Task" [ 1342.822504] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.833601] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695146, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.852301] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695142, 'name': Rename_Task, 'duration_secs': 0.14632} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.853053] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.853053] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c64d8bbd-8b14-4bd6-a8c7-34f3d1fb31c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.859496] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1342.859496] env[62820]: value = "task-1695147" [ 1342.859496] env[62820]: _type = "Task" [ 1342.859496] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.870206] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695147, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.896789] env[62820]: DEBUG nova.compute.manager [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Received event network-changed-06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1342.897148] env[62820]: DEBUG nova.compute.manager [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Refreshing instance network info cache due to event network-changed-06e08e58-4a6e-47ab-a9f3-e152d75f1cd0. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1342.897405] env[62820]: DEBUG oslo_concurrency.lockutils [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] Acquiring lock "refresh_cache-42d00bd3-71fa-4c26-a544-489326163d88" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.897558] env[62820]: DEBUG oslo_concurrency.lockutils [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] Acquired lock "refresh_cache-42d00bd3-71fa-4c26-a544-489326163d88" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.897708] env[62820]: DEBUG nova.network.neutron [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Refreshing network info cache for port 06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1342.917866] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed2089da-f9ef-462c-86be-74bfe7e8be25 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.021s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.000775] env[62820]: DEBUG nova.compute.manager [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1343.001782] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d974f207-99f6-4999-8162-d274f412f922 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.033659] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695143, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.272375] env[62820]: DEBUG nova.compute.utils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] No valid host was found. 
No valid host found for resize {{(pid=62820) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1343.290372] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.290683] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.291168] env[62820]: INFO nova.compute.manager [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Rebooting instance [ 1343.333034] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695146, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.372742] env[62820]: DEBUG oslo_vmware.api [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695147, 'name': PowerOnVM_Task, 'duration_secs': 0.47554} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.372742] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1343.372894] env[62820]: INFO nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Took 7.22 seconds to spawn the instance on the hypervisor. [ 1343.373671] env[62820]: DEBUG nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1343.373896] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d94ea9-c36a-449f-9530-5bdecf57c48d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.420913] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1343.513488] env[62820]: INFO nova.compute.manager [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] instance snapshotting [ 1343.519976] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8b79be-1d0a-4ecc-82ff-06df6781d8d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.532391] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695143, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.551326] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9933e02a-0442-43fa-8937-bbaf3c7be925 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.781386] env[62820]: ERROR nova.compute.manager [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Setting instance vm_state to ERROR: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (generation 30): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78c20d76-69d1-45f6-9c50-350cdc37a452"}]} [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Traceback (most recent call last): [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 11223, in _error_out_instance_on_exception [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] yield [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 6110, in prep_resize [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._reschedule_resize_or_reraise(context, instance, [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 6190, in _reschedule_resize_or_reraise [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise exc [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 6092, in prep_resize [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._prep_resize(context, image, instance, [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/manager.py", line 
6005, in _prep_resize [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] with self.rt.resize_claim( [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return f(*args, **kwargs) [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 238, in resize_claim [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return self._move_claim( [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 378, in _move_claim [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._update(elevated, cn) [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self._update_to_placement(context, compute_node, startup) [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise attempt.get() [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 719, in reraise [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise value [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.reportclient.update_from_provider_tree( [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 
4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1499, in update_from_provider_tree [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] self.set_inventory_for_provider( [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1004, in set_inventory_for_provider [ 1343.781386] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] raise exception.ResourceProviderUpdateConflict( [ 1343.782303] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (generation 30): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78c20d76-69d1-45f6-9c50-350cdc37a452"}]} [ 1343.782303] env[62820]: ERROR nova.compute.manager [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] [ 1343.808272] env[62820]: DEBUG nova.network.neutron [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Updated VIF entry in instance network info cache for port 06e08e58-4a6e-47ab-a9f3-e152d75f1cd0. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1343.808632] env[62820]: DEBUG nova.network.neutron [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Updating instance_info_cache with network_info: [{"id": "06e08e58-4a6e-47ab-a9f3-e152d75f1cd0", "address": "fa:16:3e:63:be:f8", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e08e58-4a", "ovs_interfaceid": "06e08e58-4a6e-47ab-a9f3-e152d75f1cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.837137] env[62820]: DEBUG nova.network.neutron [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Successfully updated port: b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.838466] env[62820]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1695146, 'name': CreateVM_Task, 'duration_secs': 0.512981} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.839506] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.839669] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquired lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.839836] env[62820]: DEBUG nova.network.neutron [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1343.841538] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1343.842566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.842765] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.843054] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1343.843308] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c13f788-3058-4287-8b4e-ea427ee07b52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.850744] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1343.850744] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd1f57-7786-69b0-6e28-8bc08729e917" [ 1343.850744] env[62820]: _type = "Task" [ 1343.850744] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.852063] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66a5839-c689-46b0-b2e5-19540928d267 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.868030] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a87346-fb69-47e7-9546-823a14252d0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.872636] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bd1f57-7786-69b0-6e28-8bc08729e917, 'name': SearchDatastore_Task, 'duration_secs': 0.014493} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.872636] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.872881] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1343.873117] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.873264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.873448] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1343.874279] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f49d40c-2f2e-4b4d-9b82-047b226f13ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.912086] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b794e672-8b14-41ab-b36f-eb1c8f28e6e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.913168] env[62820]: INFO nova.compute.manager [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Took 29.76 seconds to build instance. [ 1343.916470] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1343.916650] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1343.919304] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-811bb670-4bfe-4698-977d-869a5ce724ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.922993] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3d4cdd-5dfc-41db-bf0f-cfd54514c3ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.931770] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1343.931770] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a75b31-e558-a1a9-8e7a-ce9a4170b81d" [ 1343.931770] env[62820]: _type = "Task" [ 1343.931770] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.948602] env[62820]: DEBUG nova.compute.provider_tree [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.959057] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a75b31-e558-a1a9-8e7a-ce9a4170b81d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.963891] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.036827] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695143, 'name': CloneVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.063482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1344.063816] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-883535ef-9137-45b3-a2ec-d0ed21a44300 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.071289] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1344.071289] env[62820]: value = "task-1695148" [ 1344.071289] env[62820]: _type = "Task" [ 1344.071289] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.078058] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695148, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.312099] env[62820]: DEBUG oslo_concurrency.lockutils [req-becf6b35-5e0f-4187-832f-55e39f4a7ec0 req-9a89f05b-61b9-46a6-bbfb-94def95ffa2a service nova] Releasing lock "refresh_cache-42d00bd3-71fa-4c26-a544-489326163d88" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.343018] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.344068] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.344425] env[62820]: DEBUG nova.network.neutron [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.416037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b072ae0f-5d5f-4d87-81aa-cbecfc4d9a64 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.647s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.442499] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a75b31-e558-a1a9-8e7a-ce9a4170b81d, 'name': SearchDatastore_Task, 'duration_secs': 0.025337} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.443672] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52872d13-b4be-455c-a146-c07e0ac3d7cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.449703] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1344.449703] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521b8c0d-c0a4-32c9-f1fa-fa4cf67debdb" [ 1344.449703] env[62820]: _type = "Task" [ 1344.449703] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.453936] env[62820]: DEBUG nova.scheduler.client.report [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1344.467969] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521b8c0d-c0a4-32c9-f1fa-fa4cf67debdb, 'name': SearchDatastore_Task} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.470718] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.471395] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 42d00bd3-71fa-4c26-a544-489326163d88/42d00bd3-71fa-4c26-a544-489326163d88.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1344.471395] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7323b481-fbc9-4f8e-8229-ebe52ee56cf2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.479071] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1344.479071] env[62820]: value = "task-1695149" [ 1344.479071] env[62820]: _type = "Task" [ 1344.479071] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.492737] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695149, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.536554] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695143, 'name': CloneVM_Task, 'duration_secs': 1.602876} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.536900] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Created linked-clone VM from snapshot [ 1344.537921] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc7c23f-311c-4866-8179-cbb4455bf21d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.546689] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Uploading image 21d78180-c8ca-41c8-929a-50aaa66fa080 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1344.573253] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1344.573253] env[62820]: value = "vm-353430" [ 1344.573253] env[62820]: _type = "VirtualMachine" [ 1344.573253] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1344.573437] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2d94f345-9e5a-4fb6-9a19-b8baee6ea13c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.587909] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695148, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.591330] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lease: (returnval){ [ 1344.591330] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a69523-c7c1-3f3f-7124-361c7b36a373" [ 1344.591330] env[62820]: _type = "HttpNfcLease" [ 1344.591330] env[62820]: } obtained for exporting VM: (result){ [ 1344.591330] env[62820]: value = "vm-353430" [ 1344.591330] env[62820]: _type = "VirtualMachine" [ 1344.591330] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1344.592041] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the lease: (returnval){ [ 1344.592041] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a69523-c7c1-3f3f-7124-361c7b36a373" [ 1344.592041] env[62820]: _type = "HttpNfcLease" [ 1344.592041] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1344.602273] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1344.602273] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a69523-c7c1-3f3f-7124-361c7b36a373" [ 1344.602273] env[62820]: _type = "HttpNfcLease" [ 1344.602273] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1344.602273] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1344.602273] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a69523-c7c1-3f3f-7124-361c7b36a373" [ 1344.602273] env[62820]: _type = "HttpNfcLease" [ 1344.602273] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1344.602890] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e039a06-4d0c-405a-9579-769c8583de3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.614164] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c11f63-5fcd-8129-0f13-8ee865876502/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1344.614164] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c11f63-5fcd-8129-0f13-8ee865876502/disk-0.vmdk for reading. 
{{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1344.733945] env[62820]: DEBUG nova.network.neutron [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Updating instance_info_cache with network_info: [{"id": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "address": "fa:16:3e:37:fa:79", "network": {"id": "ae850709-e683-4f0e-9f97-eb3697ed3770", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1182603103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec28e0df25f4d18b8d707ba9849e098", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9fb1075-80", "ovs_interfaceid": "b9fb1075-80d0-4a63-a82b-80d3eedd8fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.743998] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d66461bf-922a-4136-8a68-ab4a4533b539 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.893115] env[62820]: DEBUG nova.network.neutron [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1344.920724] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1344.956311] env[62820]: DEBUG nova.compute.manager [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1344.956556] env[62820]: DEBUG nova.compute.manager [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing instance network info cache due to event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1344.956786] env[62820]: DEBUG oslo_concurrency.lockutils [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.956979] env[62820]: DEBUG oslo_concurrency.lockutils [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.957707] env[62820]: DEBUG nova.network.neutron [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1344.963920] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.964366] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1344.967505] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.578s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.969054] env[62820]: INFO nova.compute.claims [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1344.995238] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695149, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.083951] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695148, 'name': CreateSnapshot_Task, 'duration_secs': 0.623772} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.084155] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1345.085029] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8253e1f-6aba-4d48-9ece-01b8542dd2b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.140253] env[62820]: DEBUG nova.network.neutron [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updating instance_info_cache with network_info: [{"id": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "address": "fa:16:3e:c3:4c:f6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5622bc1-fd", "ovs_interfaceid": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.183633] env[62820]: DEBUG nova.compute.manager [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Received event network-vif-plugged-b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1345.183951] env[62820]: DEBUG oslo_concurrency.lockutils [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.185427] env[62820]: DEBUG oslo_concurrency.lockutils [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.185427] env[62820]: DEBUG oslo_concurrency.lockutils 
[req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.185427] env[62820]: DEBUG nova.compute.manager [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] No waiting events found dispatching network-vif-plugged-b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1345.185427] env[62820]: WARNING nova.compute.manager [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Received unexpected event network-vif-plugged-b5622bc1-fd38-457a-9f31-249b2c1721ce for instance with vm_state building and task_state spawning. [ 1345.185818] env[62820]: DEBUG nova.compute.manager [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Received event network-changed-b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1345.185818] env[62820]: DEBUG nova.compute.manager [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Refreshing instance network info cache due to event network-changed-b5622bc1-fd38-457a-9f31-249b2c1721ce. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1345.185893] env[62820]: DEBUG oslo_concurrency.lockutils [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] Acquiring lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.239668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Releasing lock "refresh_cache-f2658dfa-baed-4ff3-8c7e-733bbcf1916e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.447428] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.476328] env[62820]: DEBUG nova.compute.utils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1345.477835] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Allocating IP information in the 
background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1345.478104] env[62820]: DEBUG nova.network.neutron [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1345.492652] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695149, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572257} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.493064] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 42d00bd3-71fa-4c26-a544-489326163d88/42d00bd3-71fa-4c26-a544-489326163d88.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1345.493424] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1345.493821] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79cf3fc9-d411-41ff-88a8-a00b0b17178f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.503209] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1345.503209] env[62820]: value = "task-1695151" [ 1345.503209] env[62820]: _type = "Task" [ 1345.503209] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.183473] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1346.184059] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Releasing lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.184357] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Instance network_info: |[{"id": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "address": "fa:16:3e:c3:4c:f6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5622bc1-fd", "ovs_interfaceid": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1346.188571] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.191715] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1346.193746] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695151, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.194337] env[62820]: WARNING oslo_vmware.common.loopingcall [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] task run outlasted interval by 0.19048500000000002 sec [ 1346.195564] env[62820]: DEBUG nova.policy [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7a0e8f276074325b78193cb7a2a3a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1730db17199844cd8833f1176d249b0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1346.197366] env[62820]: DEBUG nova.compute.manager [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1346.202365] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-77eb65ec-3c28-4b4c-a845-633ab4089063 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.202822] env[62820]: DEBUG oslo_concurrency.lockutils [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] Acquired lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.203134] env[62820]: DEBUG nova.network.neutron [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Refreshing network info cache for port b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1346.204528] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:4c:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5622bc1-fd38-457a-9f31-249b2c1721ce', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1346.211856] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Creating folder: Project (8561ded662f04b3eb420b60ca3345771). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1346.216284] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8a0498-7bed-47ed-b770-37155e6ac0a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.221255] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe64ab64-1d21-40f2-bc49-9e54950ceecb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.222738] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.223085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.242616] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1346.242616] env[62820]: value = "task-1695152" [ 1346.242616] env[62820]: _type = "Task" [ 1346.242616] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.250597] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130229} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.255446] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1346.255876] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Created folder: Project (8561ded662f04b3eb420b60ca3345771) in parent group-v353379. [ 1346.256133] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Creating folder: Instances. Parent ref: group-v353435. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1346.257132] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ced62f8-8982-4f68-8098-0b39a1ceead3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.260277] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdcf8d2e-33a7-4586-8ac9-6c7ab7232888 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.265343] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695152, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.290192] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 42d00bd3-71fa-4c26-a544-489326163d88/42d00bd3-71fa-4c26-a544-489326163d88.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1346.298939] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f093b9cc-7ea8-4916-b0b9-d57b84c1b34f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.318185] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Created folder: Instances in parent group-v353435. [ 1346.318185] env[62820]: DEBUG oslo.service.loopingcall [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1346.321209] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1346.322719] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6bd7d76-bbe8-4d6f-9abb-163094b54121 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.343258] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1346.343258] env[62820]: value = "task-1695155" [ 1346.343258] env[62820]: _type = "Task" [ 1346.343258] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.348436] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1346.348436] env[62820]: value = "task-1695156" [ 1346.348436] env[62820]: _type = "Task" [ 1346.348436] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.356130] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.364040] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695156, 'name': CreateVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.627959] env[62820]: DEBUG nova.network.neutron [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updated VIF entry in instance network info cache for port b5622bc1-fd38-457a-9f31-249b2c1721ce. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1346.628740] env[62820]: DEBUG nova.network.neutron [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updating instance_info_cache with network_info: [{"id": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "address": "fa:16:3e:c3:4c:f6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5622bc1-fd", "ovs_interfaceid": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.678566] env[62820]: DEBUG nova.network.neutron [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updated VIF entry in instance network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1346.678908] env[62820]: DEBUG nova.network.neutron [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.758414] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695152, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.768390] env[62820]: DEBUG nova.network.neutron [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Successfully created port: cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.858666] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695155, 'name': ReconfigVM_Task, 'duration_secs': 0.513763} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.861920] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 42d00bd3-71fa-4c26-a544-489326163d88/42d00bd3-71fa-4c26-a544-489326163d88.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1346.862632] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695156, 'name': CreateVM_Task, 'duration_secs': 0.448084} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.865419] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-751dda9d-bce4-44d8-80d1-5ee070b72e7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.867090] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1346.868064] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.868288] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.868703] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1346.869436] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-835b5762-9943-442e-a70e-3245dea52fe0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.873440] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1346.873440] env[62820]: value = "task-1695157" [ 1346.873440] env[62820]: _type = "Task" [ 1346.873440] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.875269] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1346.875269] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521156b9-4f62-7156-bc6e-16588fd851e3" [ 1346.875269] env[62820]: _type = "Task" [ 1346.875269] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.884566] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7ba306-1e2a-4ea2-a189-74c2b67010c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.891874] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695157, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.895888] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521156b9-4f62-7156-bc6e-16588fd851e3, 'name': SearchDatastore_Task, 'duration_secs': 0.015095} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.898522] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.899423] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.899822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.900796] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.900796] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.900796] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca592cbf-faa5-472a-a9c2-bb40fc09a68a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.903776] env[62820]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5701ba-b929-4600-b4b9-ffa08c59047b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.936227] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311b3acb-ccd9-420b-bd04-cbaeef93df67 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.939208] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.939431] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.940313] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d684315d-cca7-468b-ac2a-b21939931edb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.949612] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1346.949612] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a75ddc-fccf-dbaf-7ccb-ef943840b31d" [ 1346.949612] env[62820]: _type = "Task" [ 1346.949612] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.950893] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c6b758-56d1-4056-9e7a-b768d8224105 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.969197] env[62820]: DEBUG nova.compute.provider_tree [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.974469] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a75ddc-fccf-dbaf-7ccb-ef943840b31d, 'name': SearchDatastore_Task, 'duration_secs': 0.011812} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.975791] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b9c3f8a-e431-40d7-a067-dbf64e399a00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.981524] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1346.981524] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527dfad3-6b80-fd64-a54b-ed9aa08d1943" [ 1346.981524] env[62820]: _type = "Task" [ 1346.981524] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.990271] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527dfad3-6b80-fd64-a54b-ed9aa08d1943, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.137312] env[62820]: DEBUG oslo_concurrency.lockutils [req-b4761a2a-816d-4b78-9279-9d11347c50e2 req-846d3f29-d350-49f5-8a44-82dbd38b7f8a service nova] Releasing lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.181640] env[62820]: DEBUG oslo_concurrency.lockutils [req-bb9586af-8cb2-4470-bcd8-de676c9fd17d req-eb1f7b79-4c13-4fd3-a3b8-c06dbfb4cc29 service nova] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.233917] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1347.258543] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d00db51-ef2f-433b-be2a-3f8010c20d6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.266385] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695152, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.271084] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1347.271881] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1347.271881] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1347.272052] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1347.272113] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1347.272485] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1347.272819] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1347.273107] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1347.273385] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1347.273560] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1347.273942] env[62820]: DEBUG nova.virt.hardware [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1347.274285] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Doing hard reboot of VM {{(pid=62820) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1347.275059] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e16f23c-d5f9-480b-8d89-87676fc46508 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.278520] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-766c2411-3b04-402d-82a6-92d052123758 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.287161] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ee875c-cd8f-42a6-b32d-33bcdc471bf6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.291488] env[62820]: DEBUG oslo_vmware.api [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1347.291488] env[62820]: value = "task-1695158" [ 1347.291488] env[62820]: _type = "Task" [ 1347.291488] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.309382] env[62820]: DEBUG oslo_vmware.api [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695158, 'name': ResetVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.384598] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695157, 'name': Rename_Task, 'duration_secs': 0.19979} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.384981] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1347.385326] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ce734ec-2066-4af6-8e27-e2a0a57b3bf1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.392273] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1347.392273] env[62820]: value = "task-1695159" [ 1347.392273] env[62820]: _type = "Task" [ 1347.392273] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.402165] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695159, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.479086] env[62820]: DEBUG nova.scheduler.client.report [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1347.492427] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527dfad3-6b80-fd64-a54b-ed9aa08d1943, 'name': SearchDatastore_Task, 'duration_secs': 0.010515} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.493499] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.493778] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a06d736c-a704-46e8-a6f7-85d8be40804f/a06d736c-a704-46e8-a6f7-85d8be40804f.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1347.494056] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2496c0e3-8ad8-4bb3-969f-03a813c24cd8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.501296] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1347.501296] env[62820]: value = "task-1695160" [ 1347.501296] env[62820]: _type = "Task" [ 1347.501296] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.510554] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695160, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.759754] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695152, 'name': CloneVM_Task, 'duration_secs': 1.272615} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.760179] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Created linked-clone VM from snapshot [ 1347.761114] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343cecc4-b375-40bd-9d50-c0723ed1dd08 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.771716] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Uploading image c17ed515-5684-4538-926e-6296e80a94b8 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1347.803316] env[62820]: DEBUG oslo_vmware.api [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695158, 'name': ResetVM_Task, 'duration_secs': 0.09599} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.803316] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Did hard reboot of VM {{(pid=62820) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1347.803519] env[62820]: DEBUG nova.compute.manager [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1347.804451] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a533b7-02f5-4ae8-989f-9fbead79fa36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.808812] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1347.808812] env[62820]: value = "vm-353437" [ 1347.808812] env[62820]: _type = "VirtualMachine" [ 1347.808812] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1347.809194] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bbb76bd3-db01-4bed-b4d5-d2312450b4bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.819193] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lease: (returnval){ [ 1347.819193] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d7cc83-c2b6-f40e-7204-8a626846502a" [ 1347.819193] env[62820]: _type = "HttpNfcLease" [ 1347.819193] env[62820]: } obtained for exporting VM: (result){ [ 1347.819193] env[62820]: value = "vm-353437" [ 1347.819193] env[62820]: _type = "VirtualMachine" [ 1347.819193] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1347.819456] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the lease: (returnval){ [ 1347.819456] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d7cc83-c2b6-f40e-7204-8a626846502a" [ 1347.819456] env[62820]: _type = "HttpNfcLease" [ 1347.819456] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1347.829023] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1347.829023] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d7cc83-c2b6-f40e-7204-8a626846502a" [ 1347.829023] env[62820]: _type = "HttpNfcLease" [ 1347.829023] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1347.905848] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695159, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.983097] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.015s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.983248] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1347.986024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.712s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.986274] env[62820]: DEBUG nova.objects.instance [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lazy-loading 'resources' on Instance uuid 043e14a3-df5a-4098-b147-c6460bb85423 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1348.016366] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695160, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.326506] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e32e685-1dfa-4e8d-978d-265f5900580d tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.036s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.333486] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1348.333486] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d7cc83-c2b6-f40e-7204-8a626846502a" [ 1348.333486] env[62820]: _type = "HttpNfcLease" [ 1348.333486] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1348.333927] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1348.333927] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d7cc83-c2b6-f40e-7204-8a626846502a" [ 1348.333927] env[62820]: _type = "HttpNfcLease" [ 1348.333927] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1348.334988] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88013358-a1ac-4a1c-8111-625445079e70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.344404] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526833f8-86c2-867b-7f71-22559cbf94ad/disk-0.vmdk from lease info. 
{{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1348.344636] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526833f8-86c2-867b-7f71-22559cbf94ad/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1348.408545] env[62820]: DEBUG nova.compute.manager [req-aadbf38a-27fc-436d-a439-d21134645021 req-3a3a38ee-5cde-442c-a7fe-da8620977355 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Received event network-vif-plugged-cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1348.408820] env[62820]: DEBUG oslo_concurrency.lockutils [req-aadbf38a-27fc-436d-a439-d21134645021 req-3a3a38ee-5cde-442c-a7fe-da8620977355 service nova] Acquiring lock "93098210-ca91-41b4-9b12-96fa105a2ab3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.409078] env[62820]: DEBUG oslo_concurrency.lockutils [req-aadbf38a-27fc-436d-a439-d21134645021 req-3a3a38ee-5cde-442c-a7fe-da8620977355 service nova] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.409327] env[62820]: DEBUG oslo_concurrency.lockutils [req-aadbf38a-27fc-436d-a439-d21134645021 req-3a3a38ee-5cde-442c-a7fe-da8620977355 service nova] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.409551] env[62820]: DEBUG nova.compute.manager [req-aadbf38a-27fc-436d-a439-d21134645021 req-3a3a38ee-5cde-442c-a7fe-da8620977355 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] No waiting events found dispatching network-vif-plugged-cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1348.409758] env[62820]: WARNING nova.compute.manager [req-aadbf38a-27fc-436d-a439-d21134645021 req-3a3a38ee-5cde-442c-a7fe-da8620977355 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Received unexpected event network-vif-plugged-cc74739f-914e-44f2-aa7c-dd0cef391791 for instance with vm_state building and task_state spawning. [ 1348.418110] env[62820]: DEBUG oslo_vmware.api [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695159, 'name': PowerOnVM_Task, 'duration_secs': 0.761121} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.418401] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1348.418648] env[62820]: INFO nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Took 8.55 seconds to spawn the instance on the hypervisor. [ 1348.418870] env[62820]: DEBUG nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1348.419674] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c7a045-6887-432a-89ba-0ed828f0ca40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.494648] env[62820]: DEBUG nova.compute.utils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1348.498228] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1348.498228] env[62820]: DEBUG nova.network.neutron [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1348.498627] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-85a1a447-8a72-46fe-a747-c7f5dbcb16ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.502933] env[62820]: DEBUG nova.network.neutron [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Successfully updated port: cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1348.526482] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695160, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615031} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.529400] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a06d736c-a704-46e8-a6f7-85d8be40804f/a06d736c-a704-46e8-a6f7-85d8be40804f.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1348.529753] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1348.533087] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ac8c215-2b9d-4875-8892-1c621cc31da7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.546207] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1348.546207] env[62820]: value = "task-1695162" [ 1348.546207] env[62820]: _type = "Task" [ 1348.546207] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.560440] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695162, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.564199] env[62820]: DEBUG nova.policy [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ebf04c75f0045c5863359374668c8e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4df39d4dfabf4cc4ba8761eb3a0ce73a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1348.941896] env[62820]: INFO nova.compute.manager [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Took 30.38 seconds to build instance. 
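The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries above all come from oslo.vmware's task-polling loop: the driver submits a vSphere task, then wait_for_task() repeatedly reads the task's info until it reaches a terminal state, which is what produces the "progress is N%" and "completed successfully" lines. The sketch below is only an illustration of that pattern; get_task_info is a hypothetical stand-in for the PropertyCollector call the library actually makes, and POLL_INTERVAL is an assumed value, not the real oslo.vmware API.

    import time

    POLL_INTERVAL = 0.5  # seconds between progress checks (illustrative value)

    def wait_for_task(get_task_info, task_id):
        # Poll a vSphere task until it reaches a terminal state, mirroring the
        # "progress is N%" and "completed successfully" entries in the log.
        while True:
            info = get_task_info(task_id)   # hypothetical helper standing in
                                            # for the real property read
            if info.state == 'success':
                return info.result          # e.g. the new disk or VM reference
            if info.state == 'error':
                raise RuntimeError(info.error_msg)
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(POLL_INTERVAL)
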
[ 1348.994661] env[62820]: DEBUG nova.network.neutron [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Successfully created port: 1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1349.004793] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1349.017046] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "refresh_cache-93098210-ca91-41b4-9b12-96fa105a2ab3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.017358] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "refresh_cache-93098210-ca91-41b4-9b12-96fa105a2ab3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.017426] env[62820]: DEBUG nova.network.neutron [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1349.038400] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16b9458-e4c7-4344-b24b-26bb26030a4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.053141] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e545220-f15c-435c-97d0-e18b50f0c2cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.100156] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02251cea-e52b-410a-bbd9-cab3758e8a88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.104119] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695162, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086406} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.104946] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.106511] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d70308-e9fb-4baa-88c8-9dca4b71b3e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.114167] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fff9081-6cea-472a-87d8-146b9c8783c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.140129] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] a06d736c-a704-46e8-a6f7-85d8be40804f/a06d736c-a704-46e8-a6f7-85d8be40804f.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.141157] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc87aa2e-acfc-4f5a-ab6b-819b0d7fcfa0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.166315] env[62820]: DEBUG nova.compute.provider_tree [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.175303] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1349.175303] env[62820]: value = "task-1695163" [ 1349.175303] env[62820]: _type = "Task" [ 1349.175303] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.183577] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695163, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.448364] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81f917e4-5a63-46e8-972f-1fb3dc26c400 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "42d00bd3-71fa-4c26-a544-489326163d88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.302s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.592960] env[62820]: DEBUG nova.network.neutron [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1349.673373] env[62820]: DEBUG nova.scheduler.client.report [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1349.689260] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695163, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.820361] env[62820]: DEBUG nova.network.neutron [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Updating instance_info_cache with network_info: [{"id": "cc74739f-914e-44f2-aa7c-dd0cef391791", "address": "fa:16:3e:2a:9d:1c", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc74739f-91", "ovs_interfaceid": "cc74739f-914e-44f2-aa7c-dd0cef391791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.924203] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.924506] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.924721] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.924908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1349.925167] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.927422] env[62820]: INFO nova.compute.manager [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Terminating instance [ 1349.951584] env[62820]: DEBUG nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1349.977273] env[62820]: DEBUG nova.compute.manager [None req-738260dc-eb9a-468e-948b-52452ae986b2 tempest-ServerDiagnosticsTest-16303950 tempest-ServerDiagnosticsTest-16303950-project-admin] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1349.978945] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bd466e-9f34-427e-a8a7-13afb748b0f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.987868] env[62820]: INFO nova.compute.manager [None req-738260dc-eb9a-468e-948b-52452ae986b2 tempest-ServerDiagnosticsTest-16303950 tempest-ServerDiagnosticsTest-16303950-project-admin] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Retrieving diagnostics [ 1349.988745] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be82c9a7-9b00-4dcb-9266-7193fd85b0f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.023218] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1350.186334] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.188562] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695163, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.189328] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.768s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.189759] env[62820]: DEBUG nova.objects.instance [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lazy-loading 'resources' on Instance uuid aacc6f1c-56d6-43b9-9c40-5ea49b40a657 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.210523] env[62820]: INFO nova.scheduler.client.report [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Deleted allocations for instance 043e14a3-df5a-4098-b147-c6460bb85423 [ 1350.323139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "refresh_cache-93098210-ca91-41b4-9b12-96fa105a2ab3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.323507] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Instance network_info: |[{"id": "cc74739f-914e-44f2-aa7c-dd0cef391791", "address": "fa:16:3e:2a:9d:1c", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc74739f-91", "ovs_interfaceid": "cc74739f-914e-44f2-aa7c-dd0cef391791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1350.323946] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:9d:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc74739f-914e-44f2-aa7c-dd0cef391791', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.331347] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Creating folder: Project (1730db17199844cd8833f1176d249b0c). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1350.331997] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26aaf58a-671c-4080-9a6a-1dcf5f49f090 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.343474] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Created folder: Project (1730db17199844cd8833f1176d249b0c) in parent group-v353379. [ 1350.343663] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Creating folder: Instances. Parent ref: group-v353439. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1350.343915] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2157c23e-67b9-4b5d-8c5c-3bd48fd84d5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.353391] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Created folder: Instances in parent group-v353439. [ 1350.353637] env[62820]: DEBUG oslo.service.loopingcall [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.353833] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1350.354051] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1287610-1883-434b-8526-a7121f239f7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.373122] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.373122] env[62820]: value = "task-1695166" [ 1350.373122] env[62820]: _type = "Task" [ 1350.373122] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.380959] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695166, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.431630] env[62820]: DEBUG nova.compute.manager [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1350.431859] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1350.432979] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33bf9af-6a97-41d2-9baa-eb70fc7e497c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.443725] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1350.444087] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd98e305-d6d6-4077-9a5f-65161e9316ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.451091] env[62820]: DEBUG oslo_vmware.api [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1350.451091] env[62820]: value = "task-1695167" [ 1350.451091] env[62820]: _type = "Task" [ 1350.451091] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.462407] env[62820]: DEBUG oslo_vmware.api [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695167, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.475346] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.688283] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695163, 'name': ReconfigVM_Task, 'duration_secs': 1.447652} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.688621] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Reconfigured VM instance instance-00000012 to attach disk [datastore1] a06d736c-a704-46e8-a6f7-85d8be40804f/a06d736c-a704-46e8-a6f7-85d8be40804f.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.689329] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1a0de26-9ca6-471d-b202-7207145945ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.699434] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1350.699434] env[62820]: value = "task-1695168" [ 1350.699434] env[62820]: _type = "Task" [ 1350.699434] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.709601] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695168, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.720758] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c84799aa-a977-4efd-9347-609f8090720e tempest-ServerDiagnosticsNegativeTest-368883270 tempest-ServerDiagnosticsNegativeTest-368883270-project-member] Lock "043e14a3-df5a-4098-b147-c6460bb85423" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.817s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.882885] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695166, 'name': CreateVM_Task, 'duration_secs': 0.437254} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.883247] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.884141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.884428] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.884886] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1350.887955] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0c9f1b2-982e-47cb-a920-66c88b0d80e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.893539] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1350.893539] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52da4d40-20d9-ed26-30b5-921216a4add0" [ 1350.893539] env[62820]: _type = "Task" [ 1350.893539] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.902504] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52da4d40-20d9-ed26-30b5-921216a4add0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.965711] env[62820]: DEBUG oslo_vmware.api [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695167, 'name': PowerOffVM_Task, 'duration_secs': 0.295004} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.966098] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1350.966326] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1350.966507] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d10bf47-4436-413f-a0ef-7b580cfeba75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.085814] env[62820]: DEBUG nova.network.neutron [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Successfully updated port: 1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1351.106230] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1351.106472] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1351.106651] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Deleting the datastore file [datastore1] f2658dfa-baed-4ff3-8c7e-733bbcf1916e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1351.106970] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ec9a37a-936d-480a-9735-7f1174123273 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.115113] env[62820]: DEBUG oslo_vmware.api [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for the task: (returnval){ [ 1351.115113] env[62820]: value = "task-1695170" [ 1351.115113] env[62820]: _type = "Task" [ 1351.115113] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.124917] env[62820]: DEBUG oslo_vmware.api [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695170, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.196726] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e663c484-a96e-4e5e-8271-49b613f45b80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.211066] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c8ea27-0436-4af8-904a-d10ae72a7956 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.214615] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695168, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.246753] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41211af2-d51b-47aa-8250-2aedc80bcc74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.255247] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9175f740-515a-4825-af02-3a9eddc566f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.270974] env[62820]: DEBUG nova.compute.provider_tree [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.404445] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52da4d40-20d9-ed26-30b5-921216a4add0, 'name': SearchDatastore_Task, 'duration_secs': 0.015664} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.404805] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.405123] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1351.405404] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.405571] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.405810] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1351.406168] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-265fdd0c-edaf-4f49-942a-a0d8d594708d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.420642] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1351.420892] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1351.421787] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d17ff84d-47b7-4b9d-9964-769917e8e695 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.429023] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1351.429023] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bce113-7a2a-98a7-dcef-bb6401434f15" [ 1351.429023] env[62820]: _type = "Task" [ 1351.429023] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.443583] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bce113-7a2a-98a7-dcef-bb6401434f15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.589236] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "refresh_cache-15e95a20-2729-46c6-a613-32aa353ed329" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.589664] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquired lock "refresh_cache-15e95a20-2729-46c6-a613-32aa353ed329" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.589980] env[62820]: DEBUG nova.network.neutron [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1351.624879] env[62820]: DEBUG oslo_vmware.api [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Task: {'id': task-1695170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291398} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.625181] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1351.625368] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1351.625541] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1351.625711] env[62820]: INFO nova.compute.manager [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1351.625965] env[62820]: DEBUG oslo.service.loopingcall [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1351.626229] env[62820]: DEBUG nova.compute.manager [-] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1351.626328] env[62820]: DEBUG nova.network.neutron [-] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1351.711238] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695168, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.774657] env[62820]: DEBUG nova.scheduler.client.report [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1351.942385] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bce113-7a2a-98a7-dcef-bb6401434f15, 'name': SearchDatastore_Task, 'duration_secs': 0.011145} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.943577] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-889eb5b8-b0f6-433b-b5c1-5ed94a69bed1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.951132] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1351.951132] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521cbf52-1ae2-aad2-c177-34c25078b071" [ 1351.951132] env[62820]: _type = "Task" [ 1351.951132] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.963848] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521cbf52-1ae2-aad2-c177-34c25078b071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.122434] env[62820]: DEBUG nova.network.neutron [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1352.210305] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695168, 'name': Rename_Task, 'duration_secs': 1.163404} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.210598] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1352.210841] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2428240c-7445-4255-8ad7-0b870abe1f81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.217615] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1352.217615] env[62820]: value = "task-1695171" [ 1352.217615] env[62820]: _type = "Task" [ 1352.217615] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.227089] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695171, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.264759] env[62820]: DEBUG nova.network.neutron [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Updating instance_info_cache with network_info: [{"id": "1fe640fa-567b-4c64-8ce9-b029e4fe2c5f", "address": "fa:16:3e:83:b6:36", "network": {"id": "ef240538-f411-4369-8a94-2d83c48a72ca", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-982440130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4df39d4dfabf4cc4ba8761eb3a0ce73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359c2c31-99c4-41d7-a513-3bc4825897a0", "external-id": "nsx-vlan-transportzone-173", "segmentation_id": 173, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fe640fa-56", "ovs_interfaceid": "1fe640fa-567b-4c64-8ce9-b029e4fe2c5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.279938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1352.282870] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.586s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.283176] env[62820]: DEBUG nova.objects.instance [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lazy-loading 'resources' on Instance uuid b3d1f811-1d28-40f7-8bf8-c29eb64896c0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1352.306228] env[62820]: INFO nova.scheduler.client.report [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Deleted allocations for instance aacc6f1c-56d6-43b9-9c40-5ea49b40a657 [ 1352.337161] env[62820]: DEBUG nova.network.neutron [-] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.462870] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521cbf52-1ae2-aad2-c177-34c25078b071, 'name': SearchDatastore_Task, 'duration_secs': 0.011625} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.463140] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.463429] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 93098210-ca91-41b4-9b12-96fa105a2ab3/93098210-ca91-41b4-9b12-96fa105a2ab3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1352.463665] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-842d90cd-3a9a-40f6-a129-19a318fa4dd8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.474397] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1352.474397] env[62820]: value = "task-1695172" [ 1352.474397] env[62820]: _type = "Task" [ 1352.474397] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.484335] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695172, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.729050] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695171, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.767914] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Releasing lock "refresh_cache-15e95a20-2729-46c6-a613-32aa353ed329" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.768306] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Instance network_info: |[{"id": "1fe640fa-567b-4c64-8ce9-b029e4fe2c5f", "address": "fa:16:3e:83:b6:36", "network": {"id": "ef240538-f411-4369-8a94-2d83c48a72ca", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-982440130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4df39d4dfabf4cc4ba8761eb3a0ce73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359c2c31-99c4-41d7-a513-3bc4825897a0", "external-id": "nsx-vlan-transportzone-173", "segmentation_id": 173, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fe640fa-56", "ovs_interfaceid": "1fe640fa-567b-4c64-8ce9-b029e4fe2c5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1352.812764] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ebce537-9c01-48d8-8bed-9e21e9c792c5 tempest-ServerShowV254Test-1295297472 tempest-ServerShowV254Test-1295297472-project-member] Lock "aacc6f1c-56d6-43b9-9c40-5ea49b40a657" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.827s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.840467] env[62820]: INFO nova.compute.manager [-] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Took 1.21 seconds to deallocate network for instance. 
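
The repeating "Waiting for the task: (returnval){ value = task-... } to complete" / "progress is N%" / "completed successfully" entries are oslo.vmware's wait_for_task polling a vCenter task object until it reaches a terminal state. A rough, library-agnostic sketch of that poll loop; the TaskInfo shape and fetch_task_info callable are stand-ins, not the oslo.vmware API.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    state: str                 # 'queued' | 'running' | 'success' | 'error'
    progress: Optional[int]    # percent, when the task reports it
    error: Optional[str] = None

def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a task until it finishes, emitting progress like the entries above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

# Toy usage: a fake task that finishes on the third poll.
_polls = iter([TaskInfo("running", 0), TaskInfo("running", 88), TaskInfo("success", 100)])
result = wait_for_task(lambda: next(_polls), poll_interval=0.01)
print("completed successfully:", result.state)
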
[ 1352.985305] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695172, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.216593] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3630485-310f-4649-8826-cbbc20f20837 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.228619] env[62820]: DEBUG oslo_vmware.api [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695171, 'name': PowerOnVM_Task, 'duration_secs': 0.654798} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.230642] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1353.230868] env[62820]: INFO nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Took 10.59 seconds to spawn the instance on the hypervisor. [ 1353.231066] env[62820]: DEBUG nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1353.231867] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6186f244-5c74-44ee-8003-53027368efd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.234992] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04846104-9fba-40ab-aad3-47c9c6031d59 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.267397] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b495d1a9-df7b-4b4b-8ae5-2de40cf7723a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.281057] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ba2f1c-5e50-4f21-b344-940f34d6fadf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.297698] env[62820]: DEBUG nova.compute.provider_tree [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1353.349166] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.489767] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695172, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.780843] env[62820]: INFO nova.compute.manager [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Took 33.54 seconds to build instance. [ 1353.800406] env[62820]: DEBUG nova.scheduler.client.report [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1353.988370] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695172, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.071325} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.988639] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 93098210-ca91-41b4-9b12-96fa105a2ab3/93098210-ca91-41b4-9b12-96fa105a2ab3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1353.988854] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1353.989126] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-402001bc-2d94-42d6-958e-72acd33efeb7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.996319] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1353.996319] env[62820]: value = "task-1695173" [ 1353.996319] env[62820]: _type = "Task" [ 1353.996319] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.005872] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695173, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.283180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aabe300e-2182-4936-8551-b4a9e0706fe4 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.823s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.305638] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.023s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.308363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.743s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.310034] env[62820]: INFO nova.compute.claims [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1354.329850] env[62820]: INFO nova.scheduler.client.report [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Deleted allocations for instance b3d1f811-1d28-40f7-8bf8-c29eb64896c0 [ 1354.506175] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082037} completed successfully. 
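
The lockutils entries in this stretch report both how long a caller waited to acquire the shared "compute_resources" lock and how long it then held it (e.g. "waited 22.743s", "held 2.023s" above). A small illustration of that instrumentation using a plain threading.Lock; the timed_lock helper and its messages are modelled on the log output, not taken from oslo.concurrency itself.

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str):
    """Acquire a named lock and report waited/held durations, lockutils-style."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    acquired_at = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired_at - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - acquired_at:.3f}s')

# Usage: serialize resource-tracker style updates on one lock name.
with timed_lock("compute_resources"):
    time.sleep(0.05)   # stand-in for ResourceTracker.update_usage work
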
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.506460] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1354.507266] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a941d6-cc96-4910-8f71-749c59a1391e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.529225] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 93098210-ca91-41b4-9b12-96fa105a2ab3/93098210-ca91-41b4-9b12-96fa105a2ab3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1354.529545] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d62d19b2-3627-4639-a6bf-a8c9065fa4a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.549905] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1354.549905] env[62820]: value = "task-1695174" [ 1354.549905] env[62820]: _type = "Task" [ 1354.549905] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.558369] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695174, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.786212] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1354.839298] env[62820]: DEBUG oslo_concurrency.lockutils [None req-44eec3d5-5e4d-4a17-b9df-3c1fce65089d tempest-TenantUsagesTestJSON-1106564838 tempest-TenantUsagesTestJSON-1106564838-project-member] Lock "b3d1f811-1d28-40f7-8bf8-c29eb64896c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.937s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.061711] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695174, 'name': ReconfigVM_Task, 'duration_secs': 0.387334} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.062090] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 93098210-ca91-41b4-9b12-96fa105a2ab3/93098210-ca91-41b4-9b12-96fa105a2ab3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1355.062724] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-286bf298-8ae0-4a6a-9968-734138eb915d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.069900] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1355.069900] env[62820]: value = "task-1695175" [ 1355.069900] env[62820]: _type = "Task" [ 1355.069900] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.079184] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695175, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.307727] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.583871] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695175, 'name': Rename_Task, 'duration_secs': 0.169376} completed successfully. 
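
Tasks task-1695172 through task-1695176 trace one instance's root-disk preparation: copy the cached image VMDK into the instance directory, extend it to the flavor's root size, reconfigure the VM to attach the disk, rename, then power on (the power-on completes just below). A condensed orchestration sketch of that order; every helper on the vc object is a placeholder for the corresponding vSphere task, not Nova's vmwareapi code.

from dataclasses import dataclass

@dataclass
class SpawnDiskPlan:
    datastore: str
    image_id: str
    instance_uuid: str
    root_gb: int

def prepare_root_disk(plan: SpawnDiskPlan, vc) -> None:
    """Mirror the task sequence in the log: copy -> extend -> attach -> rename -> power on.

    `vc` is a stand-in client whose methods each submit one vCenter task and block
    until it finishes (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task,
    Rename_Task, PowerOnVM_Task).
    """
    cache_vmdk = f"[{plan.datastore}] devstack-image-cache_base/{plan.image_id}/{plan.image_id}.vmdk"
    root_vmdk = f"[{plan.datastore}] {plan.instance_uuid}/{plan.instance_uuid}.vmdk"

    vc.copy_virtual_disk(cache_vmdk, root_vmdk)                     # CopyVirtualDisk_Task
    vc.extend_virtual_disk(root_vmdk, plan.root_gb * 1024 * 1024)   # size in KB: 1 GB -> 1048576
    vc.attach_disk_to_vm(plan.instance_uuid, root_vmdk, disk_type="sparse")  # ReconfigVM_Task
    vc.rename_vm(plan.instance_uuid)                                # Rename_Task
    vc.power_on_vm(plan.instance_uuid)                              # PowerOnVM_Task

class FakeVC:
    """Stub that just records the calls in order, for illustration."""
    def __getattr__(self, name):
        return lambda *a, **kw: print(name, a, kw)

prepare_root_disk(SpawnDiskPlan("datastore1", "b17619ac", "93098210", root_gb=1), FakeVC())
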
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.584224] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1355.584520] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ca6a67e-ad47-44e5-ba8d-36285fb600e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.590959] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1355.590959] env[62820]: value = "task-1695176" [ 1355.590959] env[62820]: _type = "Task" [ 1355.590959] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.600042] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695176, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.726487] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ff61f4-5dd9-4600-a609-7324c5caa8e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.734200] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba86253-aa06-4847-b568-7efab04bf8f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.765373] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84da2e86-c789-4e50-8b0c-d25678f70aeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.773573] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e1ae50-cf7e-4b2d-90b0-c919b3c65b10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.787851] env[62820]: DEBUG nova.compute.provider_tree [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.102310] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695176, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.146743] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1356.147012] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1356.147181] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.147425] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1356.147547] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.147665] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1356.147872] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1356.148047] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1356.148224] env[62820]: DEBUG nova.virt.hardware [None 
req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1356.148389] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1356.148562] env[62820]: DEBUG nova.virt.hardware [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1356.150800] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22d5725-af30-4a7e-93bb-696f235563f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.162244] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c11f63-5fcd-8129-0f13-8ee865876502/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1356.163567] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800739ff-4cf1-46fd-b40e-bbae7393c7b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.167933] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cfe2fd-ff54-4254-a23e-e195d4a32553 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.180642] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:b6:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359c2c31-99c4-41d7-a513-3bc4825897a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fe640fa-567b-4c64-8ce9-b029e4fe2c5f', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1356.188025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Creating folder: Project (4df39d4dfabf4cc4ba8761eb3a0ce73a). Parent ref: group-v353379. 
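
The nova.virt.hardware block above goes from unconstrained flavor/image limits (0:0:0) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for a 1-vCPU flavor. A toy version of that enumeration: list every sockets x cores x threads combination whose product equals the vCPU count and that fits under the given maxima; the names here are illustrative, not the actual hardware.py helpers.

from dataclasses import dataclass
from itertools import product
from typing import List

@dataclass(frozen=True)
class CPUTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[CPUTopology]:
    """All factorisations of `vcpus` into sockets*cores*threads within the limits."""
    found = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append(CPUTopology(s, c, t))
    return found

print(possible_topologies(1))   # [CPUTopology(sockets=1, cores=1, threads=1)], as in the log
print(possible_topologies(4))   # 1x1x4, 1x2x2, 1x4x1, 2x1x2, 2x2x1, 4x1x1
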
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1356.189981] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44434438-1f55-4207-aa2d-da151bbe7f50 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.191688] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c11f63-5fcd-8129-0f13-8ee865876502/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1356.191865] env[62820]: ERROR oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c11f63-5fcd-8129-0f13-8ee865876502/disk-0.vmdk due to incomplete transfer. [ 1356.192097] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a2164165-4e33-4926-8200-f8cc580b8656 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.199932] env[62820]: DEBUG oslo_vmware.rw_handles [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c11f63-5fcd-8129-0f13-8ee865876502/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1356.200155] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Uploaded image 21d78180-c8ca-41c8-929a-50aaa66fa080 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1356.202759] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1356.204440] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e5651733-16c6-48d8-91fc-d8fb45f726b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.205999] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Created folder: Project (4df39d4dfabf4cc4ba8761eb3a0ce73a) in parent group-v353379. [ 1356.206223] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Creating folder: Instances. Parent ref: group-v353442. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1356.206455] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dda9d81d-ea4f-4602-96e6-4f99071df0b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.211824] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1356.211824] env[62820]: value = "task-1695178" [ 1356.211824] env[62820]: _type = "Task" [ 1356.211824] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.218026] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Created folder: Instances in parent group-v353442. [ 1356.218251] env[62820]: DEBUG oslo.service.loopingcall [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1356.221266] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1356.221486] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695178, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.221696] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cff97ed-c2c4-41c4-a8dc-7bad5ca23d70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.243649] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1356.243649] env[62820]: value = "task-1695180" [ 1356.243649] env[62820]: _type = "Task" [ 1356.243649] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.253045] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695180, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.292044] env[62820]: DEBUG nova.scheduler.client.report [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1356.338925] env[62820]: DEBUG nova.compute.manager [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Received event network-changed-cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1356.339414] env[62820]: DEBUG nova.compute.manager [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Refreshing instance network info cache due to event network-changed-cc74739f-914e-44f2-aa7c-dd0cef391791. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1356.339414] env[62820]: DEBUG oslo_concurrency.lockutils [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] Acquiring lock "refresh_cache-93098210-ca91-41b4-9b12-96fa105a2ab3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.339414] env[62820]: DEBUG oslo_concurrency.lockutils [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] Acquired lock "refresh_cache-93098210-ca91-41b4-9b12-96fa105a2ab3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.339623] env[62820]: DEBUG nova.network.neutron [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Refreshing network info cache for port cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.603339] env[62820]: DEBUG oslo_vmware.api [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695176, 'name': PowerOnVM_Task, 'duration_secs': 0.767309} completed successfully. 
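
The req-627f2f76 entries show Neutron notifying Nova of a network-changed event for port cc74739f-914e-44f2-aa7c-dd0cef391791, after which the compute manager re-fetches that port's info under the instance's refresh_cache lock and rewrites its cache entry. A bare-bones sketch of that event-driven refresh; the cache dict, lock handling, and fetch_port_info callback are assumptions of the sketch, not Nova's neutron module.

import threading
from typing import Callable, Dict

class InstanceNetworkCache:
    """Per-instance network-info cache refreshed when a port-change event arrives."""

    def __init__(self, fetch_port_info: Callable[[str], dict]):
        self._fetch = fetch_port_info        # e.g. a Neutron GET /v2.0/ports/{id}
        self._cache: Dict[str, list] = {}    # instance uuid -> list of vif dicts
        self._locks: Dict[str, threading.Lock] = {}

    def _lock(self, instance_uuid: str) -> threading.Lock:
        return self._locks.setdefault(instance_uuid, threading.Lock())

    def handle_network_changed(self, instance_uuid: str, port_id: str) -> None:
        """Mirror 'Received event network-changed-<port>' followed by a cache refresh."""
        with self._lock(instance_uuid):            # like the "refresh_cache-<uuid>" lock
            fresh = self._fetch(port_id)
            vifs = self._cache.setdefault(instance_uuid, [])
            for i, vif in enumerate(vifs):
                if vif["id"] == port_id:
                    vifs[i] = fresh                # "Updated VIF entry in instance network info cache"
                    break
            else:
                vifs.append(fresh)

# Toy usage with a canned Neutron response.
cache = InstanceNetworkCache(lambda pid: {"id": pid, "address": "fa:16:3e:2a:9d:1c"})
cache.handle_network_changed("93098210-ca91-41b4-9b12-96fa105a2ab3",
                             "cc74739f-914e-44f2-aa7c-dd0cef391791")
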
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.603660] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.603880] env[62820]: INFO nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Took 9.37 seconds to spawn the instance on the hypervisor. [ 1356.604085] env[62820]: DEBUG nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.605279] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa530bc-a383-435a-9359-f0189b6e82a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.724024] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695178, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.756924] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695180, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.797103] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.489s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.797620] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1356.800296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.654s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.801867] env[62820]: INFO nova.compute.claims [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1356.906472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "42d00bd3-71fa-4c26-a544-489326163d88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.906725] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "42d00bd3-71fa-4c26-a544-489326163d88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.906920] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "42d00bd3-71fa-4c26-a544-489326163d88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.907041] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "42d00bd3-71fa-4c26-a544-489326163d88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.907213] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "42d00bd3-71fa-4c26-a544-489326163d88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.909469] env[62820]: INFO nova.compute.manager [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Terminating instance [ 1357.126135] env[62820]: INFO nova.compute.manager [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 
tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Took 33.91 seconds to build instance. [ 1357.193425] env[62820]: DEBUG nova.network.neutron [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Updated VIF entry in instance network info cache for port cc74739f-914e-44f2-aa7c-dd0cef391791. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1357.193828] env[62820]: DEBUG nova.network.neutron [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Updating instance_info_cache with network_info: [{"id": "cc74739f-914e-44f2-aa7c-dd0cef391791", "address": "fa:16:3e:2a:9d:1c", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc74739f-91", "ovs_interfaceid": "cc74739f-914e-44f2-aa7c-dd0cef391791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.223929] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695178, 'name': Destroy_Task, 'duration_secs': 0.621234} completed successfully. 
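
The instance_info_cache payloads just above are lists of VIF dicts carrying the port id, MAC address, per-subnet fixed IPs, and network metadata such as the MTU. A small helper that pulls the usable addressing facts out of one such payload; the tuple summary shape is this sketch's own, not a Nova model.

from typing import Iterable, List, Tuple

def summarize_network_info(network_info: Iterable[dict]) -> List[Tuple[str, str, List[str], int]]:
    """Return (port_id, mac, fixed_ips, mtu) per VIF from a cache payload like the one above."""
    summary = []
    for vif in network_info:
        net = vif.get("network", {})
        ips = [ip["address"]
               for subnet in net.get("subnets", [])
               for ip in subnet.get("ips", [])]
        summary.append((vif["id"], vif["address"], ips, net.get("meta", {}).get("mtu", 1500)))
    return summary

# Trimmed-down copy of the cc74739f entry from the cache update above.
sample = [{
    "id": "cc74739f-914e-44f2-aa7c-dd0cef391791",
    "address": "fa:16:3e:2a:9d:1c",
    "network": {
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.12"}]}],
        "meta": {"mtu": 8950},
    },
}]
print(summarize_network_info(sample))
# [('cc74739f-914e-44f2-aa7c-dd0cef391791', 'fa:16:3e:2a:9d:1c', ['192.168.128.12'], 8950)]
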
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.224241] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Destroyed the VM [ 1357.224621] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1357.224875] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ae208664-6ce1-450f-b82d-fc29aad80dbc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.232014] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1357.232014] env[62820]: value = "task-1695181" [ 1357.232014] env[62820]: _type = "Task" [ 1357.232014] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.241567] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695181, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.254498] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695180, 'name': CreateVM_Task, 'duration_secs': 0.542455} completed successfully. 
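
The req-5c87811d entries (lease ready, read handle closed, image 21d78180 uploaded to Glance, Destroy_Task, then RemoveSnapshot_Task above) sketch the tail of a snapshot upload: release the NFC lease, destroy the temporary upload VM, then drop the snapshot it was cloned from. A schematic of that cleanup order; the three callables are placeholders, not oslo.vmware or Nova functions.

from typing import Callable

def finish_snapshot_upload(close_read_handle: Callable[[], None],
                           destroy_upload_vm: Callable[[], None],
                           remove_snapshot: Callable[[], None]) -> None:
    """Cleanup order implied by the log entries above."""
    close_read_handle()      # releases the HttpNfcLease once the VMDK stream is done
    destroy_upload_vm()      # Destroy_Task on the throwaway VM
    remove_snapshot()        # RemoveSnapshot_Task on the source instance

finish_snapshot_upload(lambda: print("lease released"),
                       lambda: print("upload VM destroyed"),
                       lambda: print("snapshot removed"))
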
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.255176] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1357.255883] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.256314] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.256414] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1357.256677] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-734697c7-ae98-492a-9162-23584413bc94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.261305] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1357.261305] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52deb7fe-59fd-4046-3a24-a3cce8670a0a" [ 1357.261305] env[62820]: _type = "Task" [ 1357.261305] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.270869] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52deb7fe-59fd-4046-3a24-a3cce8670a0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.312060] env[62820]: DEBUG nova.compute.utils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1357.313740] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1357.313936] env[62820]: DEBUG nova.network.neutron [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1357.413289] env[62820]: DEBUG nova.compute.manager [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1357.413784] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1357.417029] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa51e509-2743-4420-b19b-f101a090d092 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.423188] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.426532] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02121a35-c3f0-49ba-80ae-a46f75e0a9f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.431058] env[62820]: DEBUG oslo_vmware.api [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1357.431058] env[62820]: value = "task-1695182" [ 1357.431058] env[62820]: _type = "Task" [ 1357.431058] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.432510] env[62820]: DEBUG nova.policy [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b328ffc83d344899fcbbb6e9ade1698', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bdc42fe98fb43d7bd92e2dd789aff93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1357.446590] env[62820]: DEBUG oslo_vmware.api [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695182, 'name': PowerOffVM_Task} progress is 0%. 
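
The nova.policy line above reports that the network:attach_external_network check failed for a token holding only the member and reader roles, so port allocation proceeds on the tenant network. A toy role-based check illustrating that decision; the rule table (admin-only for this rule) and function are assumptions of the sketch, not oslo.policy or Nova's default policy file.

from typing import Dict, List

# Toy rule table: a rule passes if the caller has any of the listed roles.
RULES: Dict[str, List[str]] = {
    "network:attach_external_network": ["admin"],
}

def check_policy(rule: str, credentials: dict) -> bool:
    """Return True when the caller's roles satisfy the rule, False otherwise."""
    allowed = RULES.get(rule, [])
    return any(role in allowed for role in credentials.get("roles", []))

# The member/reader credentials from the log line above fail the admin-only rule.
creds = {"user_id": "3b328ffc83d344899fcbbb6e9ade1698",
         "project_id": "6bdc42fe98fb43d7bd92e2dd789aff93",
         "roles": ["member", "reader"]}
print(check_policy("network:attach_external_network", creds))   # False
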
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.629743] env[62820]: DEBUG oslo_concurrency.lockutils [None req-64a2742e-2a4d-4940-81e3-3378c8b3d961 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.646s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.702227] env[62820]: DEBUG oslo_concurrency.lockutils [req-627f2f76-7b84-460c-b361-1af738bbf829 req-d2febcfb-eeb4-44c3-92c7-d2010151dd0d service nova] Releasing lock "refresh_cache-93098210-ca91-41b4-9b12-96fa105a2ab3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.750970] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695181, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.773833] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52deb7fe-59fd-4046-3a24-a3cce8670a0a, 'name': SearchDatastore_Task, 'duration_secs': 0.018391} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.773833] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.773833] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1357.774191] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.774408] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.774659] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 
tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1357.774977] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a625e064-c2ae-4f78-ae46-cb29799d6066 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.785968] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1357.786184] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1357.786948] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f95a771-0a42-4975-87d6-62edef6dacd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.798673] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1357.798673] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5239cb0b-1607-9edc-ed07-d6028ee0f884" [ 1357.798673] env[62820]: _type = "Task" [ 1357.798673] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.808415] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239cb0b-1607-9edc-ed07-d6028ee0f884, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.821414] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1357.953242] env[62820]: DEBUG oslo_vmware.api [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695182, 'name': PowerOffVM_Task, 'duration_secs': 0.286355} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.954094] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1357.954417] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1357.957792] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2de81434-04c7-45c7-b74f-3017a66726a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.015415] env[62820]: DEBUG nova.network.neutron [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Successfully created port: a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1358.135157] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1358.207739] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526833f8-86c2-867b-7f71-22559cbf94ad/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1358.208124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46ca2cc-aed8-40a8-b188-840184dc5c80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.220203] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526833f8-86c2-867b-7f71-22559cbf94ad/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1358.220203] env[62820]: ERROR oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526833f8-86c2-867b-7f71-22559cbf94ad/disk-0.vmdk due to incomplete transfer. 
[ 1358.220203] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fb0bcb67-5ae6-4c9c-bf2c-4ba8997bb20b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.228189] env[62820]: DEBUG oslo_vmware.rw_handles [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526833f8-86c2-867b-7f71-22559cbf94ad/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1358.228189] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Uploaded image c17ed515-5684-4538-926e-6296e80a94b8 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1358.228539] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1358.231262] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-df2d617f-f2c8-4639-8e0f-7eec73fcf275 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.240766] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1358.240766] env[62820]: value = "task-1695184" [ 1358.240766] env[62820]: _type = "Task" [ 1358.240766] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.244049] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695181, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.253625] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695184, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.314931] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239cb0b-1607-9edc-ed07-d6028ee0f884, 'name': SearchDatastore_Task, 'duration_secs': 0.017654} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.315745] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-850138b5-5132-4e52-9b86-29b3f5777b73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.329575] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1358.329575] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5260b830-a7af-4cda-37ae-2e7333c51541" [ 1358.329575] env[62820]: _type = "Task" [ 1358.329575] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.343321] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5260b830-a7af-4cda-37ae-2e7333c51541, 'name': SearchDatastore_Task, 'duration_secs': 0.01683} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.343794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.344474] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 15e95a20-2729-46c6-a613-32aa353ed329/15e95a20-2729-46c6-a613-32aa353ed329.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1358.344949] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-442dd343-e454-44d3-ac1b-cdb85f39a00f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.357018] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1358.357018] env[62820]: value = "task-1695185" [ 1358.357018] env[62820]: _type = "Task" [ 1358.357018] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.366479] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695185, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.409835] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb83067-0541-42a4-b458-91e196488a43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.418333] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1806b41-ee0c-49a3-a8af-9be3a03e72f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.454958] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5881b01-4ac0-489b-9645-219898f29037 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.465520] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdd7892-4211-4ebc-a277-eb8277d81bcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.481635] env[62820]: DEBUG nova.compute.provider_tree [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1358.529126] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Received event network-vif-plugged-1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1358.529475] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Acquiring lock "15e95a20-2729-46c6-a613-32aa353ed329-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.529620] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Lock "15e95a20-2729-46c6-a613-32aa353ed329-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.529777] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Lock "15e95a20-2729-46c6-a613-32aa353ed329-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.529944] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] No waiting events found dispatching network-vif-plugged-1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1358.530122] 
env[62820]: WARNING nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Received unexpected event network-vif-plugged-1fe640fa-567b-4c64-8ce9-b029e4fe2c5f for instance with vm_state building and task_state spawning. [ 1358.530287] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Received event network-changed-1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1358.530439] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Refreshing instance network info cache due to event network-changed-1fe640fa-567b-4c64-8ce9-b029e4fe2c5f. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1358.530619] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Acquiring lock "refresh_cache-15e95a20-2729-46c6-a613-32aa353ed329" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.530751] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Acquired lock "refresh_cache-15e95a20-2729-46c6-a613-32aa353ed329" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.530901] env[62820]: DEBUG nova.network.neutron [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Refreshing network info cache for port 1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1358.665277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.745583] env[62820]: DEBUG oslo_vmware.api [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695181, 'name': RemoveSnapshot_Task, 'duration_secs': 1.148474} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.748819] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1358.749260] env[62820]: INFO nova.compute.manager [None req-5c87811d-b0f9-4262-a177-6f91eee1f40e tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Took 18.84 seconds to snapshot the instance on the hypervisor. [ 1358.756040] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695184, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.839024] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1358.865388] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695185, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.868457] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1358.868836] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1358.869061] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1358.869300] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1358.869508] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1358.869691] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1358.869947] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1358.870205] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1358.870442] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 
tempest-ImagesTestJSON-108607288-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1358.870634] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1358.871914] env[62820]: DEBUG nova.virt.hardware [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1358.872886] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be36921-a692-4c3b-a187-bdf22f0abb4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.881496] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7802ef-8237-4849-9367-46cd08f71b95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.984912] env[62820]: DEBUG nova.scheduler.client.report [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1359.129795] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.130073] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.130313] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Deleting the datastore file [datastore1] 42d00bd3-71fa-4c26-a544-489326163d88 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.130637] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47b4dca7-3b4e-446c-b386-8e88dbd0d699 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.138207] env[62820]: DEBUG 
oslo_vmware.api [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for the task: (returnval){ [ 1359.138207] env[62820]: value = "task-1695186" [ 1359.138207] env[62820]: _type = "Task" [ 1359.138207] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.150255] env[62820]: DEBUG oslo_vmware.api [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.264596] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695184, 'name': Destroy_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.337498] env[62820]: DEBUG nova.network.neutron [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Updated VIF entry in instance network info cache for port 1fe640fa-567b-4c64-8ce9-b029e4fe2c5f. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.337498] env[62820]: DEBUG nova.network.neutron [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Updating instance_info_cache with network_info: [{"id": "1fe640fa-567b-4c64-8ce9-b029e4fe2c5f", "address": "fa:16:3e:83:b6:36", "network": {"id": "ef240538-f411-4369-8a94-2d83c48a72ca", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-982440130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4df39d4dfabf4cc4ba8761eb3a0ce73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359c2c31-99c4-41d7-a513-3bc4825897a0", "external-id": "nsx-vlan-transportzone-173", "segmentation_id": 173, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fe640fa-56", "ovs_interfaceid": "1fe640fa-567b-4c64-8ce9-b029e4fe2c5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.366621] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695185, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791698} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.366928] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 15e95a20-2729-46c6-a613-32aa353ed329/15e95a20-2729-46c6-a613-32aa353ed329.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1359.367164] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1359.367487] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c0dcf54-1712-4027-b6fc-ffdd83f3ec57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.374248] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1359.374248] env[62820]: value = "task-1695187" [ 1359.374248] env[62820]: _type = "Task" [ 1359.374248] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.383779] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695187, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.493018] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.493018] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1359.494513] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.548s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.494874] env[62820]: DEBUG nova.objects.instance [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lazy-loading 'resources' on Instance uuid 7c5d1740-92ba-4d4b-a557-10f8ea58e883 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1359.652277] env[62820]: DEBUG oslo_vmware.api [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Task: {'id': task-1695186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182517} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.652582] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1359.652770] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1359.652945] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1359.653307] env[62820]: INFO nova.compute.manager [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1359.653420] env[62820]: DEBUG oslo.service.loopingcall [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1359.653621] env[62820]: DEBUG nova.compute.manager [-] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1359.653717] env[62820]: DEBUG nova.network.neutron [-] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1359.755971] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695184, 'name': Destroy_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.842618] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Releasing lock "refresh_cache-15e95a20-2729-46c6-a613-32aa353ed329" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.842618] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Received event network-vif-deleted-b9fb1075-80d0-4a63-a82b-80d3eedd8fe5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1359.842618] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Received event network-changed-b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1359.842618] env[62820]: DEBUG nova.compute.manager [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Refreshing instance network info cache due to event network-changed-b5622bc1-fd38-457a-9f31-249b2c1721ce. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1359.842618] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Acquiring lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.842618] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Acquired lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.842618] env[62820]: DEBUG nova.network.neutron [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Refreshing network info cache for port b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1359.895025] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068994} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.895025] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1359.895025] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd6104b-2d0a-4c4f-bf68-0440b56f3723 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.919999] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 15e95a20-2729-46c6-a613-32aa353ed329/15e95a20-2729-46c6-a613-32aa353ed329.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1359.920744] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2173eff-0627-4329-aa19-5112890a3003 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.945231] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1359.945231] env[62820]: value = "task-1695188" [ 1359.945231] env[62820]: _type = "Task" [ 1359.945231] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.954243] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695188, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.997928] env[62820]: DEBUG nova.compute.utils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1359.999386] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1360.003306] env[62820]: DEBUG nova.network.neutron [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1360.075771] env[62820]: DEBUG nova.policy [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1fc53730fe448acb0ed19d590dde60b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3edacaf37e34169a73932db948fa6d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1360.134021] env[62820]: DEBUG nova.network.neutron [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Successfully updated port: a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1360.260753] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695184, 'name': Destroy_Task, 'duration_secs': 2.016407} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.261207] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Destroyed the VM [ 1360.261602] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1360.261949] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-27f88b06-d32c-484d-9493-85a87625e79d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.272019] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1360.272019] env[62820]: value = "task-1695189" [ 1360.272019] env[62820]: _type = "Task" [ 1360.272019] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.278876] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695189, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.383802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "31639194-b0c4-4eb9-a6f4-e61b067c807f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.384339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.455916] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695188, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.491335] env[62820]: DEBUG nova.network.neutron [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Successfully created port: 5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1360.509229] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1360.532763] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63387f4-7332-4c46-9607-4baf05a4c64d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.543939] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a3f819-abed-416a-b0b8-4dd320d00a15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.576890] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b5291d-ba10-450c-843c-9008b8ece83b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.585074] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dcdcea-116f-40f5-a6ec-43e7bfc906b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.601106] env[62820]: DEBUG nova.network.neutron [-] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.604138] env[62820]: DEBUG nova.compute.provider_tree [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.640616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "refresh_cache-6176f083-b61a-40d6-90a0-680b628a1e08" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.640616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "refresh_cache-6176f083-b61a-40d6-90a0-680b628a1e08" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.640616] env[62820]: DEBUG nova.network.neutron [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 
6176f083-b61a-40d6-90a0-680b628a1e08] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1360.675973] env[62820]: DEBUG nova.network.neutron [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updated VIF entry in instance network info cache for port b5622bc1-fd38-457a-9f31-249b2c1721ce. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1360.676960] env[62820]: DEBUG nova.network.neutron [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updating instance_info_cache with network_info: [{"id": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "address": "fa:16:3e:c3:4c:f6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5622bc1-fd", "ovs_interfaceid": "b5622bc1-fd38-457a-9f31-249b2c1721ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.782397] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695189, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.958637] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695188, 'name': ReconfigVM_Task, 'duration_secs': 0.746379} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.959845] env[62820]: DEBUG nova.compute.manager [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Received event network-vif-plugged-a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1360.960059] env[62820]: DEBUG oslo_concurrency.lockutils [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] Acquiring lock "6176f083-b61a-40d6-90a0-680b628a1e08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.960167] env[62820]: DEBUG oslo_concurrency.lockutils [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] Lock "6176f083-b61a-40d6-90a0-680b628a1e08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.960402] env[62820]: DEBUG oslo_concurrency.lockutils [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] Lock "6176f083-b61a-40d6-90a0-680b628a1e08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.960575] env[62820]: DEBUG nova.compute.manager [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] No waiting events found dispatching network-vif-plugged-a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1360.960979] env[62820]: WARNING nova.compute.manager [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Received unexpected event network-vif-plugged-a033acfd-d3ec-4c0f-a248-fb38c4e3533b for instance with vm_state building and task_state spawning. [ 1360.961384] env[62820]: DEBUG nova.compute.manager [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Received event network-vif-deleted-06e08e58-4a6e-47ab-a9f3-e152d75f1cd0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1360.961605] env[62820]: DEBUG nova.compute.manager [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Received event network-changed-a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1360.961772] env[62820]: DEBUG nova.compute.manager [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Refreshing instance network info cache due to event network-changed-a033acfd-d3ec-4c0f-a248-fb38c4e3533b. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1360.961940] env[62820]: DEBUG oslo_concurrency.lockutils [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] Acquiring lock "refresh_cache-6176f083-b61a-40d6-90a0-680b628a1e08" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.962314] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 15e95a20-2729-46c6-a613-32aa353ed329/15e95a20-2729-46c6-a613-32aa353ed329.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1360.963097] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d209ff65-498f-4625-ab63-2eec37e27815 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.969689] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1360.969689] env[62820]: value = "task-1695190" [ 1360.969689] env[62820]: _type = "Task" [ 1360.969689] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.978706] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695190, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.019082] env[62820]: INFO nova.virt.block_device [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Booting with volume 763afde5-c692-44d0-a083-7f09ae379a22 at /dev/sda [ 1361.060395] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4c91ac0-f343-4fa5-854c-1e2467eaf750 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.070357] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51bd15c-1eea-4d72-84d6-bccc00bbfbad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.100495] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ad49625-5c9d-4d02-84b4-63406d4de362 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.106877] env[62820]: INFO nova.compute.manager [-] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Took 1.45 seconds to deallocate network for instance. 
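Note on the lock lines above: the "Acquiring lock" / "Acquired lock" / "Releasing lock" messages around the refresh_cache entries are emitted by oslo.concurrency while Nova serializes access to an instance's network info cache. A minimal sketch of that pattern, using the real lockutils context manager but a hypothetical refresh_instance_cache() helper and a sample UUID copied from the log, could look like this:

    # Sketch only: lockutils is the real oslo.concurrency module; the helper
    # function and the instance UUID are illustrative placeholders, not Nova code.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "6176f083-b61a-40d6-90a0-680b628a1e08"

    def refresh_instance_cache(instance_uuid):
        # Stand-in for the Neutron round-trip that rebuilds network_info.
        return []

    # lockutils.lock() returns a context manager; entering and leaving it is
    # what produces the DEBUG "Acquiring/Acquired/Releasing lock" lines here.
    with lockutils.lock("refresh_cache-%s" % INSTANCE_UUID):
        network_info = refresh_instance_cache(INSTANCE_UUID)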
[ 1361.107046] env[62820]: DEBUG nova.scheduler.client.report [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1361.114045] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2278330-8efb-466f-b3da-645ec7c06cc2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.151207] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e3b394-fbb0-4cf6-881d-b19907ae0eb7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.159763] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d284b08b-02c1-49ed-a167-83ae99509300 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.173255] env[62820]: DEBUG nova.virt.block_device [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updating existing volume attachment record: fd961c45-dd86-4400-9054-5b6a1763c3c3 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1361.178814] env[62820]: DEBUG oslo_concurrency.lockutils [req-ede19bd1-c771-4f7d-85f0-c5f9be84c788 req-3bd3fac6-3997-45d3-8281-5211cbc893e6 service nova] Releasing lock "refresh_cache-a06d736c-a704-46e8-a6f7-85d8be40804f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.186891] env[62820]: DEBUG nova.network.neutron [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1361.280395] env[62820]: DEBUG oslo_vmware.api [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695189, 'name': RemoveSnapshot_Task, 'duration_secs': 0.629987} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.280817] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1361.280911] env[62820]: INFO nova.compute.manager [None req-567b9cfa-9cb7-4225-a917-234dd004a67c tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Took 17.76 seconds to snapshot the instance on the hypervisor. [ 1361.363682] env[62820]: DEBUG nova.network.neutron [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Updating instance_info_cache with network_info: [{"id": "a033acfd-d3ec-4c0f-a248-fb38c4e3533b", "address": "fa:16:3e:14:59:c7", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa033acfd-d3", "ovs_interfaceid": "a033acfd-d3ec-4c0f-a248-fb38c4e3533b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.480060] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695190, 'name': Rename_Task, 'duration_secs': 0.406344} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.481272] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.481272] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c9fa2ff-c235-46b8-a086-4a59c5a6091b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.486754] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1361.486754] env[62820]: value = "task-1695191" [ 1361.486754] env[62820]: _type = "Task" [ 1361.486754] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.495214] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695191, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.628348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.631318] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.633505] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.384s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.633505] env[62820]: INFO nova.compute.claims [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1361.670655] env[62820]: INFO nova.scheduler.client.report [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Deleted allocations for instance 7c5d1740-92ba-4d4b-a557-10f8ea58e883 [ 1361.866970] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "refresh_cache-6176f083-b61a-40d6-90a0-680b628a1e08" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.867409] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Instance network_info: |[{"id": "a033acfd-d3ec-4c0f-a248-fb38c4e3533b", "address": "fa:16:3e:14:59:c7", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa033acfd-d3", "ovs_interfaceid": "a033acfd-d3ec-4c0f-a248-fb38c4e3533b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1361.867802] env[62820]: DEBUG oslo_concurrency.lockutils [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] Acquired lock "refresh_cache-6176f083-b61a-40d6-90a0-680b628a1e08" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.867987] env[62820]: DEBUG nova.network.neutron [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Refreshing network info cache for port a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1361.869218] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:59:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a033acfd-d3ec-4c0f-a248-fb38c4e3533b', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1361.877544] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating folder: Project (6bdc42fe98fb43d7bd92e2dd789aff93). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1361.878400] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a90363d0-d696-4582-a382-54af75676063 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.891784] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created folder: Project (6bdc42fe98fb43d7bd92e2dd789aff93) in parent group-v353379. [ 1361.891991] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating folder: Instances. Parent ref: group-v353445. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1361.892259] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f0f9399-e00a-41af-ab07-4491d946f6db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.895907] env[62820]: DEBUG nova.compute.manager [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1361.896816] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0231876b-803d-4d7a-9ef5-85db63e82ee1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.908279] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created folder: Instances in parent group-v353445. [ 1361.908414] env[62820]: DEBUG oslo.service.loopingcall [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1361.909691] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1361.913847] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dabe6a33-69ea-4444-a092-d69071d17d93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.942252] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1361.942252] env[62820]: value = "task-1695194" [ 1361.942252] env[62820]: _type = "Task" [ 1361.942252] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.950321] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695194, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.998216] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695191, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.181784] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1305e089-4424-4d6e-853b-77465b1648c2 tempest-ServerMetadataNegativeTestJSON-1398534677 tempest-ServerMetadataNegativeTestJSON-1398534677-project-member] Lock "7c5d1740-92ba-4d4b-a557-10f8ea58e883" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.016s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.203270] env[62820]: DEBUG nova.network.neutron [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Successfully updated port: 5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1362.262496] env[62820]: DEBUG nova.compute.manager [req-25de738f-61bb-4406-8839-664043a708c7 req-d896b3b3-17f3-4a93-b97e-c42165375acb service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Received event network-vif-plugged-5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1362.262764] env[62820]: DEBUG oslo_concurrency.lockutils [req-25de738f-61bb-4406-8839-664043a708c7 req-d896b3b3-17f3-4a93-b97e-c42165375acb service nova] Acquiring lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.262992] env[62820]: DEBUG oslo_concurrency.lockutils [req-25de738f-61bb-4406-8839-664043a708c7 req-d896b3b3-17f3-4a93-b97e-c42165375acb service nova] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.263191] env[62820]: DEBUG oslo_concurrency.lockutils [req-25de738f-61bb-4406-8839-664043a708c7 req-d896b3b3-17f3-4a93-b97e-c42165375acb service nova] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.263392] env[62820]: DEBUG nova.compute.manager [req-25de738f-61bb-4406-8839-664043a708c7 req-d896b3b3-17f3-4a93-b97e-c42165375acb service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] No waiting events found dispatching network-vif-plugged-5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1362.263565] env[62820]: WARNING nova.compute.manager [req-25de738f-61bb-4406-8839-664043a708c7 req-d896b3b3-17f3-4a93-b97e-c42165375acb service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Received unexpected event 
network-vif-plugged-5d1e82ae-c035-4664-9764-24afac8896b1 for instance with vm_state building and task_state block_device_mapping. [ 1362.414341] env[62820]: INFO nova.compute.manager [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] instance snapshotting [ 1362.417638] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1349693-d628-4dbc-9170-0e01ec0016fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.438589] env[62820]: DEBUG nova.compute.manager [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1362.439537] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91119e1b-c37b-456d-9d5a-eca175eeeb67 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.442461] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aa434e-f5e8-4679-8872-a9f35800d3ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.457315] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695194, 'name': CreateVM_Task, 'duration_secs': 0.490026} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.459596] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1362.460411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.460561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.460884] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1362.461122] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceb4bebc-0ea4-410d-ac4f-a2a8d69773bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.466467] env[62820]: DEBUG oslo_vmware.api 
[None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1362.466467] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fbb5dd-243d-63a0-1ba2-0a851eba8e76" [ 1362.466467] env[62820]: _type = "Task" [ 1362.466467] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.475998] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fbb5dd-243d-63a0-1ba2-0a851eba8e76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.497026] env[62820]: DEBUG oslo_vmware.api [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695191, 'name': PowerOnVM_Task, 'duration_secs': 0.826893} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.498423] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.498631] env[62820]: INFO nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Took 12.48 seconds to spawn the instance on the hypervisor. [ 1362.498810] env[62820]: DEBUG nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1362.499586] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431f7257-9aa1-4ecf-a64e-1bde62449788 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.673308] env[62820]: DEBUG nova.network.neutron [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Updated VIF entry in instance network info cache for port a033acfd-d3ec-4c0f-a248-fb38c4e3533b. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1362.675378] env[62820]: DEBUG nova.network.neutron [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Updating instance_info_cache with network_info: [{"id": "a033acfd-d3ec-4c0f-a248-fb38c4e3533b", "address": "fa:16:3e:14:59:c7", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa033acfd-d3", "ovs_interfaceid": "a033acfd-d3ec-4c0f-a248-fb38c4e3533b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.709492] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.709642] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquired lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.709825] env[62820]: DEBUG nova.network.neutron [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.963086] env[62820]: INFO nova.compute.manager [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] instance snapshotting [ 1362.965331] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1362.966266] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-8dcf03c5-7e57-452c-922b-e8e85b174655 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.969882] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad1ee0e-f93b-443f-8c1f-4c8050ebdd81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.980633] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1362.980633] env[62820]: value = "task-1695195" [ 1362.980633] env[62820]: _type = "Task" [ 1362.980633] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.999159] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fbb5dd-243d-63a0-1ba2-0a851eba8e76, 'name': SearchDatastore_Task, 'duration_secs': 0.011876} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.004991] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.005356] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1363.005701] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.005868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.006324] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1363.007055] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fa0749-e86d-4268-9b22-12143760906f {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.009985] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45a3c382-dc38-4035-a048-fb55f88ec343 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.020176] env[62820]: INFO nova.compute.manager [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Took 37.65 seconds to build instance. [ 1363.030269] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695195, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.036902] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1363.036902] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1363.036902] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e05c1e2e-85a7-43e7-952c-58b9bd6880ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.042784] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1363.042784] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e6a353-fa06-7bc1-b2f6-dad2c5c51737" [ 1363.042784] env[62820]: _type = "Task" [ 1363.042784] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.053681] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e6a353-fa06-7bc1-b2f6-dad2c5c51737, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.126765] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd10e4ff-acfc-4ae9-ac92-37dd7702a81f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.134992] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861356b7-7b16-4b4f-900d-4a4dbc09fce7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.168150] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4081ef86-fad5-405f-a865-e69b00174609 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.176247] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf1907e-4b6b-4ff9-975e-a8a4a6e4ccb4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.184256] env[62820]: DEBUG oslo_concurrency.lockutils [req-72601f55-c83b-44df-aee0-d6524bf9c174 req-eb0fac1d-2445-4b9d-8c78-06b9417af036 service nova] Releasing lock "refresh_cache-6176f083-b61a-40d6-90a0-680b628a1e08" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.193688] env[62820]: DEBUG nova.compute.provider_tree [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.252676] env[62820]: DEBUG nova.network.neutron [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1363.300726] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1363.301293] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1363.301526] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1363.301684] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1363.301868] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1363.302024] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1363.302176] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1363.302399] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1363.302556] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1363.302734] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Got 1 
possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1363.302914] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1363.303190] env[62820]: DEBUG nova.virt.hardware [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1363.304377] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261a4d14-90a2-41b3-acf0-9f9b10ed0b14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.313296] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644ce330-da83-4268-a761-cc8d8d2c2c61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.401651] env[62820]: DEBUG nova.network.neutron [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updating instance_info_cache with network_info: [{"id": "5d1e82ae-c035-4664-9764-24afac8896b1", "address": "fa:16:3e:8b:75:20", "network": {"id": "6bdb14c5-5bf2-41e5-b7d6-56a2da43f416", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-781910607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3edacaf37e34169a73932db948fa6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e82ae-c0", "ovs_interfaceid": "5d1e82ae-c035-4664-9764-24afac8896b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.511452] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695195, 'name': CreateSnapshot_Task, 'duration_secs': 0.504285} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.511872] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1363.513181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3d1963-4fdb-442b-adc9-53c657e5bd0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.536794] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1363.536794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-28abe8d1-aeb0-4e96-ac7a-19cf15c0c002 tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "15e95a20-2729-46c6-a613-32aa353ed329" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.503s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.536794] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-337cbe56-8c51-4478-9c81-7156409b63dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.548491] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1363.548491] env[62820]: value = "task-1695196" [ 1363.548491] env[62820]: _type = "Task" [ 1363.548491] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.556694] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e6a353-fa06-7bc1-b2f6-dad2c5c51737, 'name': SearchDatastore_Task, 'duration_secs': 0.010283} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.558272] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd121faf-8b6a-492f-a072-c2a33d4a1562 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.566453] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695196, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.569927] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1363.569927] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52abace2-d340-21d0-79ec-73e2e0462036" [ 1363.569927] env[62820]: _type = "Task" [ 1363.569927] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.579924] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52abace2-d340-21d0-79ec-73e2e0462036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.699123] env[62820]: DEBUG nova.scheduler.client.report [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1363.903639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Releasing lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.903993] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance network_info: |[{"id": "5d1e82ae-c035-4664-9764-24afac8896b1", "address": "fa:16:3e:8b:75:20", "network": {"id": "6bdb14c5-5bf2-41e5-b7d6-56a2da43f416", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-781910607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3edacaf37e34169a73932db948fa6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e82ae-c0", "ovs_interfaceid": "5d1e82ae-c035-4664-9764-24afac8896b1", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1363.904529] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:75:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c06e3c2-8edb-4cf0-be6b-45dfe059c00b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d1e82ae-c035-4664-9764-24afac8896b1', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1363.912148] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Creating folder: Project (b3edacaf37e34169a73932db948fa6d9). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1363.912452] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b91412d-8e76-428c-9f81-9bfe0458011f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.926817] env[62820]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1363.926817] env[62820]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62820) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1363.926817] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Folder already exists: Project (b3edacaf37e34169a73932db948fa6d9). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1363.927013] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Creating folder: Instances. Parent ref: group-v353386. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1363.927274] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32e4f995-cde5-4fb7-9cc4-43cf8bb72196 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.935970] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Created folder: Instances in parent group-v353386. [ 1363.936284] env[62820]: DEBUG oslo.service.loopingcall [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.936519] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1363.936728] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e813fc5-f34e-49fa-893f-04cd30134ad0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.955947] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1363.955947] env[62820]: value = "task-1695199" [ 1363.955947] env[62820]: _type = "Task" [ 1363.955947] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.964482] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.032941] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1364.033642] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7312e166-2b5a-43ac-9b5e-a1596cec3e14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.041217] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1364.041217] env[62820]: value = "task-1695200" [ 1364.041217] env[62820]: _type = "Task" [ 1364.041217] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.042057] env[62820]: DEBUG nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1364.076067] env[62820]: DEBUG nova.compute.manager [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1364.076669] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695196, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.077008] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695200, 'name': CloneVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.077829] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d0e909-8703-4d03-9fb0-d5833cdfd74d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.090597] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52abace2-d340-21d0-79ec-73e2e0462036, 'name': SearchDatastore_Task, 'duration_secs': 0.009725} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.092762] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.092762] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6176f083-b61a-40d6-90a0-680b628a1e08/6176f083-b61a-40d6-90a0-680b628a1e08.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1364.095466] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee9167c4-c712-4b79-9d8d-ac62c9038e5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.103093] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1364.103093] env[62820]: value = "task-1695201" [ 1364.103093] env[62820]: _type = "Task" [ 1364.103093] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.113755] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695201, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.204521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.205275] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1364.208352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 22.121s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.208352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.208352] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1364.208520] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.886s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.209186] env[62820]: DEBUG nova.objects.instance [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lazy-loading 'resources' on Instance uuid b7c52283-eada-47fd-887f-a5ad94a0583a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1364.212111] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4684604c-7182-45b1-af80-0c2e6e93640a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.221491] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7947ada-2ff2-4637-ba29-461bfde08c5b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.242657] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f776014c-9108-42fe-963b-2a560839de00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1364.251832] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c42d03-89a7-4928-909f-68a8ce2b2f66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.287539] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179974MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1364.287822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.413701] env[62820]: DEBUG nova.compute.manager [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Received event network-changed-5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1364.413701] env[62820]: DEBUG nova.compute.manager [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Refreshing instance network info cache due to event network-changed-5d1e82ae-c035-4664-9764-24afac8896b1. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1364.413903] env[62820]: DEBUG oslo_concurrency.lockutils [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] Acquiring lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.414104] env[62820]: DEBUG oslo_concurrency.lockutils [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] Acquired lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.414276] env[62820]: DEBUG nova.network.neutron [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Refreshing network info cache for port 5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1364.469015] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.556746] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695200, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.571508] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695196, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.580516] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.600348] env[62820]: INFO nova.compute.manager [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] instance snapshotting [ 1364.604597] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a874835e-259f-4ebe-8f92-90ff09f8b2e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.619814] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695201, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.639029] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333acf86-b720-45f6-bf90-d9f667170483 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.713953] env[62820]: DEBUG nova.compute.utils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1364.716449] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1364.716605] env[62820]: DEBUG nova.network.neutron [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1364.782516] env[62820]: DEBUG nova.policy [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73aed58c53374fbeb387a0ee704dc2f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74f3dd3dcc10421f803a0039e3add051', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1364.968956] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.054533] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695200, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.067181] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695196, 'name': CreateSnapshot_Task, 'duration_secs': 1.189786} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.067542] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1365.068489] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323656d7-b2d6-43b0-8088-873d980e63b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.114126] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569309} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.114606] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6176f083-b61a-40d6-90a0-680b628a1e08/6176f083-b61a-40d6-90a0-680b628a1e08.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1365.114862] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1365.115137] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57f5649f-9b96-4615-ae1a-0a535846dcaf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.126148] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1365.126148] env[62820]: value = "task-1695202" [ 1365.126148] env[62820]: _type = "Task" [ 1365.126148] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.134605] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695202, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.158034] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1365.158034] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-64ad439e-977b-4170-8823-7ba482eae2cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.165174] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1365.165174] env[62820]: value = "task-1695203" [ 1365.165174] env[62820]: _type = "Task" [ 1365.165174] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.176535] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695203, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.216239] env[62820]: DEBUG nova.network.neutron [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updated VIF entry in instance network info cache for port 5d1e82ae-c035-4664-9764-24afac8896b1. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1365.216806] env[62820]: DEBUG nova.network.neutron [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updating instance_info_cache with network_info: [{"id": "5d1e82ae-c035-4664-9764-24afac8896b1", "address": "fa:16:3e:8b:75:20", "network": {"id": "6bdb14c5-5bf2-41e5-b7d6-56a2da43f416", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-781910607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3edacaf37e34169a73932db948fa6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e82ae-c0", "ovs_interfaceid": "5d1e82ae-c035-4664-9764-24afac8896b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.219459] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1365.270760] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80376d19-5187-4e9e-b4e9-75f78307ee8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.280824] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbc56c7-0869-4ffe-80a3-ed74919f289e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.284778] env[62820]: DEBUG nova.network.neutron [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Successfully created port: 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1365.316479] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ae69d5-6e50-40b1-adb4-f33cbf88f50c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.324619] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742262a0-d855-43ea-aa64-4e367b773a1a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.344791] env[62820]: DEBUG nova.compute.provider_tree [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.468998] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.553198] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695200, 'name': CloneVM_Task, 'duration_secs': 1.366954} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.553560] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Created linked-clone VM from snapshot [ 1365.554347] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8758ef-7301-42ed-811b-73d18833afba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.562194] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Uploading image 54b0a9e3-ab0a-4965-8fe7-9b749de83374 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1365.587436] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1365.589687] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1365.589687] env[62820]: value = "vm-353451" [ 1365.589687] env[62820]: _type = "VirtualMachine" [ 1365.589687] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1365.589972] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ff903ccb-c493-4fa2-8b02-a76f216fe2d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.595310] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-56f89509-1af1-439c-b9a8-dd20209d9b99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.606495] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1365.606495] env[62820]: value = "task-1695204" [ 1365.606495] env[62820]: _type = "Task" [ 1365.606495] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.608213] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lease: (returnval){ [ 1365.608213] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cffe86-cb32-4528-10ac-399209a9529b" [ 1365.608213] env[62820]: _type = "HttpNfcLease" [ 1365.608213] env[62820]: } obtained for exporting VM: (result){ [ 1365.608213] env[62820]: value = "vm-353451" [ 1365.608213] env[62820]: _type = "VirtualMachine" [ 1365.608213] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1365.609506] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the lease: (returnval){ [ 1365.609506] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cffe86-cb32-4528-10ac-399209a9529b" [ 1365.609506] env[62820]: _type = "HttpNfcLease" [ 1365.609506] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1365.621423] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695204, 'name': CloneVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.623123] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1365.623123] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cffe86-cb32-4528-10ac-399209a9529b" [ 1365.623123] env[62820]: _type = "HttpNfcLease" [ 1365.623123] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1365.623418] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1365.623418] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cffe86-cb32-4528-10ac-399209a9529b" [ 1365.623418] env[62820]: _type = "HttpNfcLease" [ 1365.623418] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1365.624159] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d39f9d-7801-4f05-a8dc-6b4469c0e9a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.635204] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254bd77-6922-ded4-133b-86195040aa4a/disk-0.vmdk from lease info. 
{{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1365.635465] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254bd77-6922-ded4-133b-86195040aa4a/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1365.639712] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695202, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093582} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.640868] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1365.698124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6300e161-4e8d-4182-a6a4-e23601ab156d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.726655] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 6176f083-b61a-40d6-90a0-680b628a1e08/6176f083-b61a-40d6-90a0-680b628a1e08.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1365.730246] env[62820]: DEBUG oslo_concurrency.lockutils [req-40317f29-7f1a-4bd3-93b7-fa2d805bd8be req-36014df6-00b5-40ee-87dd-234c69850b44 service nova] Releasing lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.734738] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-224bc6cb-d1aa-4c38-9417-7350a2e20df8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.747928] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695203, 'name': CreateSnapshot_Task, 'duration_secs': 0.520929} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.750668] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1365.752160] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896880e0-9666-4678-a4a9-f5f1da854398 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.755155] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fc54c24b-44f9-4e14-9007-b46a58f46a38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.757866] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1365.757866] env[62820]: value = "task-1695206" [ 1365.757866] env[62820]: _type = "Task" [ 1365.757866] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.773637] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695206, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.847972] env[62820]: DEBUG nova.scheduler.client.report [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1365.968311] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.119032] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695204, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.251907] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1366.268954] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695206, 'name': ReconfigVM_Task, 'duration_secs': 0.299414} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.271154] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 6176f083-b61a-40d6-90a0-680b628a1e08/6176f083-b61a-40d6-90a0-680b628a1e08.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1366.280078] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1366.280644] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22645377-64c0-4db8-be2e-a5aa01407ccc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.282371] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f2e0f57a-279e-40c3-956b-735af5ee4757 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.287539] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1366.287833] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1366.288093] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Image limits 0:0:0 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1366.288347] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1366.288517] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1366.288720] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1366.288977] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1366.289217] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1366.289398] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1366.289595] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1366.289822] env[62820]: DEBUG nova.virt.hardware [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1366.290825] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce766bc-2901-4c9d-8de7-6e25ae5c45b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.301895] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93676ead-2b2c-4a2e-b340-3c858e97c885 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.305842] env[62820]: DEBUG oslo_vmware.api [None 
req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1366.305842] env[62820]: value = "task-1695207" [ 1366.305842] env[62820]: _type = "Task" [ 1366.305842] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.306405] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1366.306405] env[62820]: value = "task-1695208" [ 1366.306405] env[62820]: _type = "Task" [ 1366.306405] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.321576] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "15e95a20-2729-46c6-a613-32aa353ed329" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.322036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "15e95a20-2729-46c6-a613-32aa353ed329" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.322525] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "15e95a20-2729-46c6-a613-32aa353ed329-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.323599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "15e95a20-2729-46c6-a613-32aa353ed329-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.323599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "15e95a20-2729-46c6-a613-32aa353ed329-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.325418] env[62820]: INFO nova.compute.manager [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Terminating instance [ 1366.332682] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 
tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695208, 'name': CloneVM_Task} progress is 12%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.336494] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695207, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.352947] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.355487] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.665s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.357221] env[62820]: INFO nova.compute.claims [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1366.378602] env[62820]: INFO nova.scheduler.client.report [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Deleted allocations for instance b7c52283-eada-47fd-887f-a5ad94a0583a [ 1366.469808] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.627157] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695204, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.820744] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695207, 'name': Rename_Task, 'duration_secs': 0.328305} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.824040] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1366.824517] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695208, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.825214] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3846450b-8c17-41fc-8807-d16fb9452ea1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.830365] env[62820]: DEBUG nova.compute.manager [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1366.830584] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1366.832445] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e3a06a-89f3-41b2-8fd8-eab7966934b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.835304] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1366.835304] env[62820]: value = "task-1695209" [ 1366.835304] env[62820]: _type = "Task" [ 1366.835304] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.842845] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1366.844095] env[62820]: DEBUG nova.compute.manager [req-995d84b7-05ac-43c0-bd4d-db0afc54fe56 req-14fd767a-4cdb-4971-8a34-bb7c396ea5b3 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-vif-plugged-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1366.844624] env[62820]: DEBUG oslo_concurrency.lockutils [req-995d84b7-05ac-43c0-bd4d-db0afc54fe56 req-14fd767a-4cdb-4971-8a34-bb7c396ea5b3 service nova] Acquiring lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.845049] env[62820]: DEBUG oslo_concurrency.lockutils [req-995d84b7-05ac-43c0-bd4d-db0afc54fe56 req-14fd767a-4cdb-4971-8a34-bb7c396ea5b3 service nova] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.845435] env[62820]: DEBUG oslo_concurrency.lockutils [req-995d84b7-05ac-43c0-bd4d-db0afc54fe56 req-14fd767a-4cdb-4971-8a34-bb7c396ea5b3 service nova] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.846083] env[62820]: DEBUG nova.compute.manager [req-995d84b7-05ac-43c0-bd4d-db0afc54fe56 req-14fd767a-4cdb-4971-8a34-bb7c396ea5b3 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] No waiting events found dispatching network-vif-plugged-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1366.846083] env[62820]: WARNING nova.compute.manager [req-995d84b7-05ac-43c0-bd4d-db0afc54fe56 req-14fd767a-4cdb-4971-8a34-bb7c396ea5b3 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received unexpected event network-vif-plugged-507956c3-f482-428d-b807-71f6d0ca9cb4 for instance with vm_state building and task_state spawning. [ 1366.847100] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e8e950d-2dc1-45ad-a160-66d96cf0d42d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.854064] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695209, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.859556] env[62820]: DEBUG oslo_vmware.api [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1366.859556] env[62820]: value = "task-1695210" [ 1366.859556] env[62820]: _type = "Task" [ 1366.859556] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.871476] env[62820]: DEBUG oslo_vmware.api [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.891974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a754e05-9f84-4110-a95f-13a4a96f629d tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "b7c52283-eada-47fd-887f-a5ad94a0583a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 29.117s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.969902] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695199, 'name': CreateVM_Task, 'duration_secs': 2.845652} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.970328] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1366.971625] env[62820]: DEBUG nova.network.neutron [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Successfully updated port: 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1366.977342] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353396', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'name': 'volume-763afde5-c692-44d0-a083-7f09ae379a22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e4668ed-801a-4105-8b9e-cf37be91c8b8', 'attached_at': '', 'detached_at': '', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'serial': '763afde5-c692-44d0-a083-7f09ae379a22'}, 'attachment_id': 'fd961c45-dd86-4400-9054-5b6a1763c3c3', 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62820) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1366.977342] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 
tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Root volume attach. Driver type: vmdk {{(pid=62820) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1366.977342] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2c616d-df33-4d0d-b6e5-062c626d0d0e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.985384] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8b50f4-6602-4d34-b0fc-0f2e56cc451c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.993718] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5f7536-0d7d-484f-9d2b-3df50c0ce20e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.002193] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6d9bf446-9cec-4e81-a2db-9cebfd57b6ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.011504] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1367.011504] env[62820]: value = "task-1695211" [ 1367.011504] env[62820]: _type = "Task" [ 1367.011504] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.021220] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695211, 'name': RelocateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.121293] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695204, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.319601] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695208, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.344570] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695209, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.372399] env[62820]: DEBUG oslo_vmware.api [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695210, 'name': PowerOffVM_Task, 'duration_secs': 0.225565} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.373569] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.375234] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1367.378321] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5605f534-29e8-4c79-91e0-096872deb599 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.459159] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1367.459395] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1367.459594] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Deleting the datastore file [datastore1] 15e95a20-2729-46c6-a613-32aa353ed329 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1367.459965] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88f1a686-40a2-49ee-acac-77a31d7124aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.467068] env[62820]: DEBUG oslo_vmware.api [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for the task: (returnval){ [ 1367.467068] env[62820]: value = "task-1695213" [ 1367.467068] env[62820]: _type = "Task" [ 1367.467068] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.479929] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.480189] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.480474] env[62820]: DEBUG nova.network.neutron [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1367.482031] env[62820]: DEBUG oslo_vmware.api [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695213, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.525402] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695211, 'name': RelocateVM_Task} progress is 20%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.630181] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695204, 'name': CloneVM_Task, 'duration_secs': 1.722274} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.630866] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Created linked-clone VM from snapshot [ 1367.631831] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dacc937-86ad-409d-aef8-06cdef3d1242 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.641051] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Uploading image 3e926d51-283e-41b2-8677-bba479fa9af9 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1367.667520] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1367.667714] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d41bb5f5-448a-4c48-acc3-ca76ae410524 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.675934] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1367.675934] env[62820]: value = "task-1695214" [ 1367.675934] env[62820]: _type = "Task" [ 1367.675934] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.691505] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695214, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.820105] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695208, 'name': CloneVM_Task, 'duration_secs': 1.265352} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.820435] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Created linked-clone VM from snapshot [ 1367.821325] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b92b7c6-6002-4ff5-b589-db19cb51eb77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.828979] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Uploading image 5d974e0f-a07c-41af-8806-ed0ddde539f1 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1367.847531] env[62820]: DEBUG oslo_vmware.api [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695209, 'name': PowerOnVM_Task, 'duration_secs': 0.94666} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.853896] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1367.854188] env[62820]: INFO nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Took 9.02 seconds to spawn the instance on the hypervisor. [ 1367.854416] env[62820]: DEBUG nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1367.855790] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7432606f-8a52-4e91-9a26-7fb6b3152f3a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.860947] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1367.860947] env[62820]: value = "vm-353455" [ 1367.860947] env[62820]: _type = "VirtualMachine" [ 1367.860947] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1367.861230] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2ea3d34f-a8a9-4137-80ce-918800d8fbae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.876779] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lease: (returnval){ [ 1367.876779] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b0f6fc-8a1b-f182-77b8-5277cf9f7fa0" [ 1367.876779] env[62820]: _type = "HttpNfcLease" [ 1367.876779] env[62820]: } obtained for exporting VM: (result){ [ 1367.876779] env[62820]: value = "vm-353455" [ 1367.876779] env[62820]: _type = "VirtualMachine" [ 1367.876779] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1367.877026] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the lease: (returnval){ [ 1367.877026] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b0f6fc-8a1b-f182-77b8-5277cf9f7fa0" [ 1367.877026] env[62820]: _type = "HttpNfcLease" [ 1367.877026] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1367.885264] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1367.885264] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b0f6fc-8a1b-f182-77b8-5277cf9f7fa0" [ 1367.885264] env[62820]: _type = "HttpNfcLease" [ 1367.885264] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1367.909711] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd295e0-c7c0-4159-8940-43a30659266c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.917820] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96612349-8229-46a6-ba9c-60ac80d42ed4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.955502] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52d4b7b-5bdb-4f76-a283-853eb141dbda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.968362] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9578aac2-3a87-4b11-948c-511545c24789 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.987412] env[62820]: DEBUG nova.compute.provider_tree [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.993377] env[62820]: DEBUG oslo_vmware.api [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Task: {'id': task-1695213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225333} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.994249] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1367.994665] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1367.994958] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1367.995330] env[62820]: INFO nova.compute.manager [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1367.995716] env[62820]: DEBUG oslo.service.loopingcall [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1367.995934] env[62820]: DEBUG nova.compute.manager [-] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1367.996063] env[62820]: DEBUG nova.network.neutron [-] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1368.022213] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695211, 'name': RelocateVM_Task, 'duration_secs': 0.588384} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.022771] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Volume attach. Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1368.023075] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353396', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'name': 'volume-763afde5-c692-44d0-a083-7f09ae379a22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e4668ed-801a-4105-8b9e-cf37be91c8b8', 'attached_at': '', 'detached_at': '', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'serial': '763afde5-c692-44d0-a083-7f09ae379a22'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1368.024467] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f940a8-05aa-4614-aa91-889070033ddb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.042906] env[62820]: DEBUG nova.network.neutron [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1368.045369] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a06baa-5e7d-4adb-99d4-d05d350d7fc6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.069780] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] volume-763afde5-c692-44d0-a083-7f09ae379a22/volume-763afde5-c692-44d0-a083-7f09ae379a22.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1368.072219] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-100021f5-1ed8-4a7a-8388-5292dd6a2852 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.094340] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1368.094340] env[62820]: value = "task-1695216" [ 1368.094340] env[62820]: _type = "Task" [ 1368.094340] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.101896] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695216, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.185963] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695214, 'name': Destroy_Task, 'duration_secs': 0.383479} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.186322] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Destroyed the VM [ 1368.186632] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1368.186900] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1703f454-5a21-416a-9a89-e8a63dfda376 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.193763] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1368.193763] env[62820]: value = "task-1695217" [ 1368.193763] env[62820]: _type = "Task" [ 1368.193763] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.203738] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695217, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.376610] env[62820]: INFO nova.compute.manager [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Took 36.83 seconds to build instance. [ 1368.384530] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1368.384530] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b0f6fc-8a1b-f182-77b8-5277cf9f7fa0" [ 1368.384530] env[62820]: _type = "HttpNfcLease" [ 1368.384530] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1368.384813] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1368.384813] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b0f6fc-8a1b-f182-77b8-5277cf9f7fa0" [ 1368.384813] env[62820]: _type = "HttpNfcLease" [ 1368.384813] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1368.385596] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab85a33-3ef5-47fc-b224-68a528f185b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.399814] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5299a05e-a5c2-363e-5fcb-386e5baee05e/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1368.400674] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5299a05e-a5c2-363e-5fcb-386e5baee05e/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1368.494822] env[62820]: DEBUG nova.scheduler.client.report [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1368.529892] env[62820]: DEBUG nova.network.neutron [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.534616] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-eea22d17-efb9-4934-baac-e331e35f4dbb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.605941] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695216, 'name': ReconfigVM_Task, 'duration_secs': 0.425886} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.606294] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Reconfigured VM instance instance-00000016 to attach disk [datastore1] volume-763afde5-c692-44d0-a083-7f09ae379a22/volume-763afde5-c692-44d0-a083-7f09ae379a22.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1368.613194] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72d196c7-e565-43e5-af31-945d744b6167 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.631160] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1368.631160] env[62820]: value = "task-1695218" [ 1368.631160] env[62820]: _type = "Task" [ 1368.631160] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.641044] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.705851] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695217, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.836996] env[62820]: DEBUG nova.compute.manager [req-7da095f8-3e31-4dff-b8c2-29830792b4c6 req-c7c156eb-2786-401c-96ad-30597c9f1bc5 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Received event network-vif-deleted-1fe640fa-567b-4c64-8ce9-b029e4fe2c5f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1368.837346] env[62820]: INFO nova.compute.manager [req-7da095f8-3e31-4dff-b8c2-29830792b4c6 req-c7c156eb-2786-401c-96ad-30597c9f1bc5 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Neutron deleted interface 1fe640fa-567b-4c64-8ce9-b029e4fe2c5f; detaching it from the instance and deleting it from the info cache [ 1368.837577] env[62820]: DEBUG nova.network.neutron [req-7da095f8-3e31-4dff-b8c2-29830792b4c6 req-c7c156eb-2786-401c-96ad-30597c9f1bc5 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.882156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-67708303-4232-4522-a999-4e3fba83bdf7 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.684s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.001387] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.001387] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1369.004596] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.041s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.007154] env[62820]: INFO nova.compute.claims [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1369.021011] env[62820]: DEBUG nova.network.neutron [-] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.035278] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.035819] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Instance network_info: |[{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1369.039017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:1a:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca401eaa-889a-4f9f-ac9a-56b4c41bfc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '507956c3-f482-428d-b807-71f6d0ca9cb4', 
'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.047946] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Creating folder: Project (74f3dd3dcc10421f803a0039e3add051). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.054483] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b750b13b-a968-4dd1-a263-dd535ada80b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.070095] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Created folder: Project (74f3dd3dcc10421f803a0039e3add051) in parent group-v353379. [ 1369.070095] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Creating folder: Instances. Parent ref: group-v353456. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.070095] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d143257c-0bdf-407b-ab8d-1f54564f53eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.081369] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Created folder: Instances in parent group-v353456. [ 1369.081761] env[62820]: DEBUG oslo.service.loopingcall [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1369.082094] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.082419] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a107f37a-c587-40fb-b7a4-41c1709a694b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.108482] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.108482] env[62820]: value = "task-1695221" [ 1369.108482] env[62820]: _type = "Task" [ 1369.108482] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.117862] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695221, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.142021] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695218, 'name': ReconfigVM_Task, 'duration_secs': 0.175012} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.142849] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353396', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'name': 'volume-763afde5-c692-44d0-a083-7f09ae379a22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e4668ed-801a-4105-8b9e-cf37be91c8b8', 'attached_at': '', 'detached_at': '', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'serial': '763afde5-c692-44d0-a083-7f09ae379a22'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1369.143458] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ad92abf-2596-4605-a0a6-be46b5cc2475 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.150616] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1369.150616] env[62820]: value = "task-1695222" [ 1369.150616] env[62820]: _type = "Task" [ 1369.150616] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.162150] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695222, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.196253] env[62820]: DEBUG nova.compute.manager [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1369.196575] env[62820]: DEBUG nova.compute.manager [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing instance network info cache due to event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1369.196977] env[62820]: DEBUG oslo_concurrency.lockutils [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.197343] env[62820]: DEBUG oslo_concurrency.lockutils [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.197547] env[62820]: DEBUG nova.network.neutron [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.210682] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695217, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.340719] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ed44cd8-6972-4b99-88fa-0a67efd8daac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.350118] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33898075-d664-4af7-88ab-e2f3f20aeae0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.393068] env[62820]: DEBUG nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1369.398099] env[62820]: DEBUG nova.compute.manager [req-7da095f8-3e31-4dff-b8c2-29830792b4c6 req-c7c156eb-2786-401c-96ad-30597c9f1bc5 service nova] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Detach interface failed, port_id=1fe640fa-567b-4c64-8ce9-b029e4fe2c5f, reason: Instance 15e95a20-2729-46c6-a613-32aa353ed329 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1369.512791] env[62820]: DEBUG nova.compute.utils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1369.517680] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1369.517680] env[62820]: DEBUG nova.network.neutron [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1369.523686] env[62820]: INFO nova.compute.manager [-] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Took 1.53 seconds to deallocate network for instance. [ 1369.563608] env[62820]: DEBUG nova.policy [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '930d73edc460442fb50a878bb76b9200', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05ec4bd31e8941ee9fc67f16798a9227', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1369.620153] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695221, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.662646] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695222, 'name': Rename_Task, 'duration_secs': 0.175996} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.663212] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1369.663937] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-452a9ac6-d416-4ca8-8bce-a9517d5e7a04 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.671730] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1369.671730] env[62820]: value = "task-1695223" [ 1369.671730] env[62820]: _type = "Task" [ 1369.671730] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.684120] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695223, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.708522] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695217, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.872133] env[62820]: DEBUG nova.network.neutron [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Successfully created port: 29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1369.909373] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "6176f083-b61a-40d6-90a0-680b628a1e08" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.909636] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.909817] env[62820]: DEBUG nova.compute.manager [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1369.910959] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc7dfac-aa4e-441a-af8b-8cf9e00a3d38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.918992] env[62820]: DEBUG nova.compute.manager [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1369.919740] env[62820]: DEBUG nova.objects.instance [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lazy-loading 'flavor' on Instance uuid 6176f083-b61a-40d6-90a0-680b628a1e08 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1369.922524] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.930893] 
env[62820]: DEBUG nova.network.neutron [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updated VIF entry in instance network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1369.932021] env[62820]: DEBUG nova.network.neutron [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.018051] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1370.031766] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.119931] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695221, 'name': CreateVM_Task, 'duration_secs': 0.526382} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.122879] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1370.123840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.125020] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.125390] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1370.125664] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d166b62a-4255-451f-bc88-7179db14d8d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.133105] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1370.133105] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b1a805-3e46-b216-c2b8-1b8e1202b51d" [ 1370.133105] env[62820]: _type = "Task" [ 1370.133105] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.146916] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b1a805-3e46-b216-c2b8-1b8e1202b51d, 'name': SearchDatastore_Task, 'duration_secs': 0.00983} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.147473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.147708] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.147939] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.148191] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.148539] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1370.148856] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86d69f05-fa25-4add-9efd-37b6f6f718b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.156951] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1370.157223] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1370.160715] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b56ba8c-052c-4e0d-b2e3-23188f3639d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.166823] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1370.166823] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529ec710-c9f1-b7c5-3058-14d113c9f47c" [ 1370.166823] env[62820]: _type = "Task" [ 1370.166823] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.174858] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529ec710-c9f1-b7c5-3058-14d113c9f47c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.186422] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695223, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.203377] env[62820]: DEBUG oslo_vmware.api [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695217, 'name': RemoveSnapshot_Task, 'duration_secs': 1.574431} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.206542] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1370.435705] env[62820]: DEBUG oslo_concurrency.lockutils [req-42142141-e07f-4933-b286-784fce956cf3 req-8eb7fe09-b056-46b1-9fcf-0f714b5bf858 service nova] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.491502] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0249e35f-d840-485b-b92b-b46e5cd86cf0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.499683] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7a08a0-24d6-48a3-9ca2-e98c81b92625 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.537312] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ae521a-4ccd-48dc-baf1-13fb02345304 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.545214] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138c0404-ac3d-470b-bd6d-18be28bac169 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.560102] env[62820]: DEBUG nova.compute.provider_tree [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.686252] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529ec710-c9f1-b7c5-3058-14d113c9f47c, 'name': SearchDatastore_Task, 'duration_secs': 0.010256} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.688112] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b01414f-b873-4a70-aca4-ff0124fcfa7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.697133] env[62820]: DEBUG oslo_vmware.api [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695223, 'name': PowerOnVM_Task, 'duration_secs': 0.713132} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.698169] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1370.698574] env[62820]: INFO nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Took 7.40 seconds to spawn the instance on the hypervisor. [ 1370.698914] env[62820]: DEBUG nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1370.701809] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58662bd6-4c2b-42aa-a6bb-c4112ad2f948 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.706446] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1370.706446] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b1fef8-0206-3bb1-4d1e-f5e7e25c9458" [ 1370.706446] env[62820]: _type = "Task" [ 1370.706446] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.717710] env[62820]: WARNING nova.compute.manager [None req-c7784805-b1e5-47f6-ace1-c6cff25cf573 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Image not found during snapshot: nova.exception.ImageNotFound: Image 3e926d51-283e-41b2-8677-bba479fa9af9 could not be found. [ 1370.731514] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b1fef8-0206-3bb1-4d1e-f5e7e25c9458, 'name': SearchDatastore_Task, 'duration_secs': 0.012187} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.731881] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.732306] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/cc2b0ed5-b711-487d-8bfc-ee2745c9ef89.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1370.732710] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc18b2b8-f550-4338-a08d-a01a551fec5c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.742294] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1370.742294] env[62820]: value = "task-1695224" [ 1370.742294] env[62820]: _type = "Task" [ 1370.742294] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.754112] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695224, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.929641] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.930071] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3366854f-0e7c-47af-bbd1-ee67cdbf4207 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.940470] env[62820]: DEBUG oslo_vmware.api [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1370.940470] env[62820]: value = "task-1695225" [ 1370.940470] env[62820]: _type = "Task" [ 1370.940470] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.951209] env[62820]: DEBUG oslo_vmware.api [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695225, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.039311] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1371.063019] env[62820]: DEBUG nova.scheduler.client.report [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1371.238958] env[62820]: INFO nova.compute.manager [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Took 34.12 seconds to build instance. [ 1371.255777] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695224, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.445594] env[62820]: DEBUG nova.network.neutron [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Successfully updated port: 29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1371.452593] env[62820]: DEBUG oslo_vmware.api [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695225, 'name': PowerOffVM_Task, 'duration_secs': 0.391427} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.452870] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1371.453077] env[62820]: DEBUG nova.compute.manager [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1371.453924] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cb4799-ae71-43e1-a842-83aa6a8a28f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.568575] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.569125] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1371.571802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.125s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.573376] env[62820]: INFO nova.compute.claims [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.740706] env[62820]: DEBUG oslo_concurrency.lockutils [None req-07efb194-11e3-4722-8f32-ceb756b619c2 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.303s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.755025] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572694} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.755411] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/cc2b0ed5-b711-487d-8bfc-ee2745c9ef89.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1371.755665] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1371.755926] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54ee0805-f0c3-42ca-9f17-8e6088f56f23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.765128] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1371.765128] env[62820]: value = "task-1695226" [ 1371.765128] env[62820]: _type = "Task" [ 1371.765128] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.773800] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695226, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.949021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.949393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquired lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.949393] env[62820]: DEBUG nova.network.neutron [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1371.967183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ccb26119-cb4b-4421-aff7-8dccab4702be tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.078710] env[62820]: DEBUG nova.compute.utils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1372.082501] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1372.082719] env[62820]: DEBUG nova.network.neutron [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1372.124479] env[62820]: DEBUG nova.policy [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf77578635f74b52970b2d7580c1bfd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b9015dc7894a1d98bf0bb73bdf7636', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1372.244451] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1372.277018] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068455} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.278777] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1372.279633] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067bce2f-4dfd-4789-a6e3-23cfb9d73766 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.313284] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/cc2b0ed5-b711-487d-8bfc-ee2745c9ef89.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1372.314320] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49186052-50cd-4825-a713-30dfbbd913a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.334832] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1372.334832] env[62820]: value = "task-1695227" [ 1372.334832] env[62820]: _type = "Task" [ 1372.334832] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.343514] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695227, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.386475] env[62820]: DEBUG nova.network.neutron [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Successfully created port: 66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1372.487023] env[62820]: DEBUG nova.network.neutron [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1372.587020] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1372.641234] env[62820]: DEBUG nova.network.neutron [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updating instance_info_cache with network_info: [{"id": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "address": "fa:16:3e:ef:6d:5b", "network": {"id": "637634a0-8c6d-4e06-945b-58f2e86d4edf", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2105723280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05ec4bd31e8941ee9fc67f16798a9227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29363f02-2a", "ovs_interfaceid": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.765696] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.846221] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695227, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.014597] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694d4096-9063-4888-bd15-21acc958f952 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.022665] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b542227c-bce3-4aeb-a435-55e5c07abaf4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.053673] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d3f92c-667b-416d-9588-62e121f7c62d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.061818] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aad5b8a-0966-44c9-be01-ecae0e80b12d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.076192] env[62820]: DEBUG nova.compute.provider_tree [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.145033] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Releasing lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.145322] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Instance network_info: |[{"id": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "address": "fa:16:3e:ef:6d:5b", "network": {"id": "637634a0-8c6d-4e06-945b-58f2e86d4edf", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2105723280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05ec4bd31e8941ee9fc67f16798a9227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29363f02-2a", "ovs_interfaceid": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1373.346545] env[62820]: DEBUG oslo_vmware.api [None 
req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695227, 'name': ReconfigVM_Task, 'duration_secs': 0.535654} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.346848] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Reconfigured VM instance instance-00000017 to attach disk [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/cc2b0ed5-b711-487d-8bfc-ee2745c9ef89.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1373.347610] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7489d56f-e6a6-4f9b-b82a-b463ddde8eb0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.355360] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1373.355360] env[62820]: value = "task-1695228" [ 1373.355360] env[62820]: _type = "Task" [ 1373.355360] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.365664] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695228, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.579305] env[62820]: DEBUG nova.scheduler.client.report [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1373.599284] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1373.866600] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695228, 'name': Rename_Task, 'duration_secs': 0.215396} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.866600] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1373.866828] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae8fff27-58c5-44db-a786-1f057888ecf1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.873463] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1373.873463] env[62820]: value = "task-1695229" [ 1373.873463] env[62820]: _type = "Task" [ 1373.873463] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.881905] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.902673] env[62820]: DEBUG nova.network.neutron [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Successfully updated port: 66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1374.085398] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.086075] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1374.090588] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.902s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.385121] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695229, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.408098] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "refresh_cache-58a26c98-cbf9-491f-8d2c-20281c3d7771" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.408297] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "refresh_cache-58a26c98-cbf9-491f-8d2c-20281c3d7771" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.408426] env[62820]: DEBUG nova.network.neutron [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.476169] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed3e389-058b-4944-a496-b7986fb7b4b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.484811] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f58f7eb-0ec2-4cb4-83bf-723f8b8bc39c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.517053] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fc0d61-5ac3-40a7-afdd-cb089e50564a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.525522] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45a967e-5cbc-4c0d-b870-2f3b9982ef1b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.540872] env[62820]: DEBUG nova.compute.provider_tree [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.596936] env[62820]: DEBUG nova.compute.utils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1374.598469] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1374.598709] env[62820]: DEBUG nova.network.neutron [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.640708] env[62820]: DEBUG nova.policy [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df960d12e8804e9da89c20d76421decd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ee4b77329e74ec3aa9f50bc0b53ffdb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1374.884719] env[62820]: DEBUG oslo_vmware.api [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695229, 'name': PowerOnVM_Task, 'duration_secs': 0.729035} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.885051] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1374.885289] env[62820]: INFO nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Took 8.63 seconds to spawn the instance on the hypervisor. [ 1374.885483] env[62820]: DEBUG nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1374.886316] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2aeacff-2a8f-4b18-a56a-9a236a1078cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.914768] env[62820]: DEBUG nova.network.neutron [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Successfully created port: ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1374.944690] env[62820]: DEBUG nova.network.neutron [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1375.044388] env[62820]: DEBUG nova.scheduler.client.report [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1375.084187] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1375.084433] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1375.084588] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.084765] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1375.084903] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.086110] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1375.087023] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1375.087023] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1375.087023] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1375.087023] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1375.087500] env[62820]: DEBUG nova.virt.hardware [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1375.089603] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a59edce-f9c9-408c-805a-d9459aecbdd6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.101744] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1375.108381] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1375.108608] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1375.108760] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.108935] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None 
req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1375.112022] env[62820]: DEBUG nova.virt.hardware [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1375.112022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8432b6c9-ac57-423b-8329-5a11526156f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.118780] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32f53a5-e6c3-4e1e-a589-c735f9bc3f8a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.122039] env[62820]: DEBUG nova.network.neutron [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Updating instance_info_cache with network_info: [{"id": "66676266-bbc8-4add-aeb0-77fc22873d87", "address": "fa:16:3e:59:ab:54", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66676266-bb", "ovs_interfaceid": "66676266-bbc8-4add-aeb0-77fc22873d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.139819] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1178dea-3c0e-4062-9d30-f2daa36b3ff3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.145113] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] 
[instance: 519c961c-557e-4796-88da-047c55d6be44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:6d:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29363f02-2acb-4e52-8db8-f9743ec7fb99', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1375.154021] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Creating folder: Project (05ec4bd31e8941ee9fc67f16798a9227). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1375.156569] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254bd77-6922-ded4-133b-86195040aa4a/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1375.156876] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3d6712e-be4b-4004-9392-9b7292ed92f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.159444] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d06188-f231-466a-a091-5da516adfa00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.175944] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Created folder: Project (05ec4bd31e8941ee9fc67f16798a9227) in parent group-v353379. [ 1375.176201] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Creating folder: Instances. Parent ref: group-v353459. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1375.176851] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254bd77-6922-ded4-133b-86195040aa4a/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1375.177013] env[62820]: ERROR oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254bd77-6922-ded4-133b-86195040aa4a/disk-0.vmdk due to incomplete transfer. 
[ 1375.177227] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-839b5098-1af9-4baa-b2bd-34e021e0768a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.179131] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1f65a95f-6d56-4877-9f92-51b913c02298 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.186671] env[62820]: DEBUG oslo_vmware.rw_handles [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254bd77-6922-ded4-133b-86195040aa4a/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1375.186864] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Uploaded image 54b0a9e3-ab0a-4965-8fe7-9b749de83374 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1375.188952] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1375.190169] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c71b3073-9f0a-4a85-8e73-174e397dbdd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.191683] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Created folder: Instances in parent group-v353459. [ 1375.191921] env[62820]: DEBUG oslo.service.loopingcall [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.192118] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 519c961c-557e-4796-88da-047c55d6be44] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1375.192602] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bab23eab-a223-45e9-9110-c77290717ede {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.209404] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1375.209404] env[62820]: value = "task-1695232" [ 1375.209404] env[62820]: _type = "Task" [ 1375.209404] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.215875] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1375.215875] env[62820]: value = "task-1695233" [ 1375.215875] env[62820]: _type = "Task" [ 1375.215875] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.218853] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695232, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.226133] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695233, 'name': CreateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.411109] env[62820]: INFO nova.compute.manager [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Took 37.18 seconds to build instance. [ 1375.552394] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.462s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.552678] env[62820]: INFO nova.compute.manager [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Successfully reverted task state from None on failure for instance. 
[ 1375.563636] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.088s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.565701] env[62820]: INFO nova.compute.claims [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server [None req-3169e0dc-fe5c-49db-8d04-599422d09de5 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Exception during message handling: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (generation 30): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78c20d76-69d1-45f6-9c50-350cdc37a452"}]} [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server raise self.value [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1375.570630] env[62820]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server raise self.value [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server raise self.value [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6110, in prep_resize [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self._reschedule_resize_or_reraise(context, instance, [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6190, in _reschedule_resize_or_reraise [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server raise exc [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6092, in prep_resize [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self._prep_resize(context, image, instance, [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6005, in _prep_resize [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server with self.rt.resize_claim( [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 238, in resize_claim [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server return self._move_claim( [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 378, in _move_claim [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self._update(elevated, cn) [ 
1375.570630] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 1375.570630] env[62820]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server raise attempt.get() [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 719, in reraise [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server raise value [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1499, in update_from_provider_tree [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1004, in set_inventory_for_provider [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateConflict( [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (generation 30): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78c20d76-69d1-45f6-9c50-350cdc37a452"}]} [ 1375.572098] env[62820]: ERROR oslo_messaging.rpc.server [ 1375.624529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "refresh_cache-58a26c98-cbf9-491f-8d2c-20281c3d7771" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.624910] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Instance network_info: |[{"id": "66676266-bbc8-4add-aeb0-77fc22873d87", "address": "fa:16:3e:59:ab:54", "network": {"id": 
"7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66676266-bb", "ovs_interfaceid": "66676266-bbc8-4add-aeb0-77fc22873d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1375.629342] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:ab:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66676266-bbc8-4add-aeb0-77fc22873d87', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1375.637667] env[62820]: DEBUG oslo.service.loopingcall [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.638579] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1375.638821] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a97064f-39b3-4234-85e5-b0994c5d7f9a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.660845] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1375.660845] env[62820]: value = "task-1695234" [ 1375.660845] env[62820]: _type = "Task" [ 1375.660845] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.671130] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695234, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.720293] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695232, 'name': Destroy_Task, 'duration_secs': 0.471106} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.723624] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Destroyed the VM [ 1375.723908] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1375.724216] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6d19a825-2f19-4b4c-8019-11b6d0748005 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.731087] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695233, 'name': CreateVM_Task, 'duration_secs': 0.470738} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.732362] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 519c961c-557e-4796-88da-047c55d6be44] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1375.732719] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1375.732719] env[62820]: value = "task-1695235" [ 1375.732719] env[62820]: _type = "Task" [ 1375.732719] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.733431] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.733598] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.733953] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1375.734263] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79758b0b-e9e0-41ef-9c11-ae94d1d5a483 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.746331] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695235, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.746673] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1375.746673] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5267fd10-05a6-02e0-5dc1-824e2a00c551" [ 1375.746673] env[62820]: _type = "Task" [ 1375.746673] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.754885] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5267fd10-05a6-02e0-5dc1-824e2a00c551, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.913749] env[62820]: DEBUG oslo_concurrency.lockutils [None req-361d99d5-879e-49e2-8afd-aaaf01582040 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.981s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.140333] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1376.144808] env[62820]: DEBUG nova.compute.manager [req-4e6ce5a0-f431-4827-a81e-2b85b29dd5b9 req-9898fcd0-d901-413b-be0f-e8a31481d962 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Received event network-vif-plugged-29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1376.145053] env[62820]: DEBUG oslo_concurrency.lockutils [req-4e6ce5a0-f431-4827-a81e-2b85b29dd5b9 req-9898fcd0-d901-413b-be0f-e8a31481d962 service nova] Acquiring lock "519c961c-557e-4796-88da-047c55d6be44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.145302] env[62820]: DEBUG oslo_concurrency.lockutils [req-4e6ce5a0-f431-4827-a81e-2b85b29dd5b9 req-9898fcd0-d901-413b-be0f-e8a31481d962 service nova] Lock "519c961c-557e-4796-88da-047c55d6be44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.145525] env[62820]: DEBUG oslo_concurrency.lockutils [req-4e6ce5a0-f431-4827-a81e-2b85b29dd5b9 req-9898fcd0-d901-413b-be0f-e8a31481d962 service nova] Lock "519c961c-557e-4796-88da-047c55d6be44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.145644] env[62820]: DEBUG nova.compute.manager [req-4e6ce5a0-f431-4827-a81e-2b85b29dd5b9 req-9898fcd0-d901-413b-be0f-e8a31481d962 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] No waiting events found dispatching network-vif-plugged-29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1376.145826] env[62820]: WARNING nova.compute.manager [req-4e6ce5a0-f431-4827-a81e-2b85b29dd5b9 req-9898fcd0-d901-413b-be0f-e8a31481d962 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Received unexpected event network-vif-plugged-29363f02-2acb-4e52-8db8-f9743ec7fb99 for instance with vm_state building and task_state spawning. 
[ 1376.150362] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "262d0714-d7d7-443c-9927-ef03ba9f230e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.150609] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.173254] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695234, 'name': CreateVM_Task, 'duration_secs': 0.360934} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.175799] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.175947] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.175993] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.176194] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.176343] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.176488] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 
tempest-ServersTestJSON-1010250127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.176689] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.176844] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.177061] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.177240] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.177469] env[62820]: DEBUG nova.virt.hardware [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.177671] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.178439] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498b987f-45ed-4685-abaf-8a12626528d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.181542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.186802] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b3926c-ddae-4e0c-8c77-dd70be97e397 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.244764] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695235, 'name': RemoveSnapshot_Task} progress is 12%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.256897] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5267fd10-05a6-02e0-5dc1-824e2a00c551, 'name': SearchDatastore_Task, 'duration_secs': 0.014155} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.257165] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.257419] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.257697] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.257857] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.258048] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.258331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.258684] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.258857] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-5f6568f4-44dd-4e84-be04-139f2ac2b822 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.261168] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8a104f1-e3d7-47bc-b3b0-fec0799c9199 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.266991] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1376.266991] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52320b05-8707-e2f2-58c7-94a6509bd188" [ 1376.266991] env[62820]: _type = "Task" [ 1376.266991] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.271340] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.271527] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1376.272610] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c20b187-645f-4198-84f3-5dfcc8aafcde {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.278496] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52320b05-8707-e2f2-58c7-94a6509bd188, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.282087] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1376.282087] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526644ae-7fb4-c192-c278-8346874fcada" [ 1376.282087] env[62820]: _type = "Task" [ 1376.282087] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.290318] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526644ae-7fb4-c192-c278-8346874fcada, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.417384] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1376.747366] env[62820]: DEBUG oslo_vmware.api [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695235, 'name': RemoveSnapshot_Task, 'duration_secs': 0.817887} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.750280] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1376.750552] env[62820]: INFO nova.compute.manager [None req-1caffdc9-ecc2-4e0a-842b-46d60af47aec tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Took 14.33 seconds to snapshot the instance on the hypervisor. [ 1376.785871] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52320b05-8707-e2f2-58c7-94a6509bd188, 'name': SearchDatastore_Task, 'duration_secs': 0.010945} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.790621] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.790935] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.791304] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.800517] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526644ae-7fb4-c192-c278-8346874fcada, 'name': SearchDatastore_Task, 'duration_secs': 0.010605} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.802201] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b604a28-3f34-4e8e-be86-ae2c10c5c8b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.812541] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1376.812541] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521ffca0-fa28-a0dd-70a4-b65a3febe01d" [ 1376.812541] env[62820]: _type = "Task" [ 1376.812541] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.820204] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521ffca0-fa28-a0dd-70a4-b65a3febe01d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.856324] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "93098210-ca91-41b4-9b12-96fa105a2ab3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.856683] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.856900] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "93098210-ca91-41b4-9b12-96fa105a2ab3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.857101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.857273] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.860089] env[62820]: INFO nova.compute.manager [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Terminating instance [ 1376.892999] env[62820]: DEBUG nova.network.neutron [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Successfully updated port: ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1376.945023] env[62820]: INFO nova.compute.manager [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Rescuing [ 1376.945023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e885756e-8f24-4a76-bf78-2db78372b6c5
tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.945193] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.945409] env[62820]: DEBUG nova.network.neutron [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.973479] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.143244] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c4805c-226b-4c21-9489-10d20362c961 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.153690] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66766784-8164-4644-bbd0-e86bc696ab17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.190577] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7928c31-02ed-4b55-95ef-cb54db540535 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.198776] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744ccb5e-6822-4a0a-b738-e7cb5eedd7b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.214519] env[62820]: DEBUG nova.compute.provider_tree [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.322436] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521ffca0-fa28-a0dd-70a4-b65a3febe01d, 'name': SearchDatastore_Task, 'duration_secs': 0.015042} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.322833] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.323218] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 519c961c-557e-4796-88da-047c55d6be44/519c961c-557e-4796-88da-047c55d6be44.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1377.323638] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.323930] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1377.324250] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5991791b-bbb8-46d2-be9d-65f45a7bca22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.326448] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21be83b2-c9ce-4dd6-ab2b-585288ae286c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.333161] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1377.333161] env[62820]: value = "task-1695236" [ 1377.333161] env[62820]: _type = "Task" [ 1377.333161] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.337772] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1377.338168] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1377.341614] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b6acc9-aeea-48ef-9667-585d38891d94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.343930] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695236, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.347766] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1377.347766] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5223b75d-3b37-bdb1-116f-8473f8cdf4cc" [ 1377.347766] env[62820]: _type = "Task" [ 1377.347766] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.355885] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5223b75d-3b37-bdb1-116f-8473f8cdf4cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.364110] env[62820]: DEBUG nova.compute.manager [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1377.365019] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.365422] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a846ac31-6a4f-4157-8c3e-c397366801fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.374020] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.374020] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6aee6e86-8947-43c5-bbdc-ba8b374f1c18 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.378602] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1377.378602] env[62820]: value = "task-1695237" [ 1377.378602] env[62820]: _type = "Task" [ 1377.378602] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.383321] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5299a05e-a5c2-363e-5fcb-386e5baee05e/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1377.384199] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f563eb85-6a6a-43ad-9c26-7f79def14c77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.391557] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.392995] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5299a05e-a5c2-363e-5fcb-386e5baee05e/disk-0.vmdk is in state: ready. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1377.393222] env[62820]: ERROR oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5299a05e-a5c2-363e-5fcb-386e5baee05e/disk-0.vmdk due to incomplete transfer. [ 1377.393742] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8acf064e-2669-40e4-a042-8db1ca96dae7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.399880] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.400141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquired lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.400382] env[62820]: DEBUG nova.network.neutron [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1377.403646] env[62820]: DEBUG oslo_vmware.rw_handles [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5299a05e-a5c2-363e-5fcb-386e5baee05e/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1377.403955] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Uploaded image 5d974e0f-a07c-41af-8806-ed0ddde539f1 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1377.405948] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1377.406530] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bf64f259-3372-453d-95a7-80edefe47c96 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.412978] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1377.412978] env[62820]: value = "task-1695238" [ 1377.412978] env[62820]: _type = "Task" [ 1377.412978] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.422696] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695238, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.574651] env[62820]: DEBUG nova.compute.manager [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1377.575919] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68eded89-1f53-4dda-8bd0-7ea49323e0ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.694495] env[62820]: DEBUG nova.network.neutron [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.717980] env[62820]: DEBUG nova.scheduler.client.report [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1377.845827] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695236, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.860753] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5223b75d-3b37-bdb1-116f-8473f8cdf4cc, 'name': SearchDatastore_Task, 'duration_secs': 0.015991} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.861653] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ec15d5-7024-47b4-8abc-8d7fb10dce0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.868637] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1377.868637] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527972bb-4ca0-6667-0d7e-5e3c8ff54a3d" [ 1377.868637] env[62820]: _type = "Task" [ 1377.868637] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.880323] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527972bb-4ca0-6667-0d7e-5e3c8ff54a3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.889120] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695237, 'name': PowerOffVM_Task, 'duration_secs': 0.261731} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.889529] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.889652] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1377.889884] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4cd2ff7-e69e-4bc6-9b46-b63da7cdbdb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.922688] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695238, 'name': Destroy_Task, 'duration_secs': 0.480831} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.922948] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Destroyed the VM [ 1377.923222] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1377.923486] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-47b89bcf-57dd-46f0-9fd5-22516fbaa821 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.930923] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1377.930923] env[62820]: value = "task-1695240" [ 1377.930923] env[62820]: _type = "Task" [ 1377.930923] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.942676] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695240, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.948182] env[62820]: DEBUG nova.network.neutron [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.973445] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1377.973555] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1377.973741] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleting the datastore file [datastore1] 93098210-ca91-41b4-9b12-96fa105a2ab3 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1377.974021] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2072f52-ece8-4693-a026-1dabbec11dfa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.980355] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1377.980355] env[62820]: value = "task-1695241" [ 1377.980355] env[62820]: _type = "Task" [ 1377.980355] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.988781] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.089311] env[62820]: INFO nova.compute.manager [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] instance snapshotting [ 1378.089599] env[62820]: WARNING nova.compute.manager [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1378.092402] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76215902-8edb-41fd-b9ee-ef9a4756629b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.112486] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bef9c7-0b56-4b43-b54f-d0615d636a8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.117012] env[62820]: DEBUG nova.network.neutron [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Updating instance_info_cache with network_info: [{"id": "ad48a330-41a2-437b-92eb-66a7086d8380", "address": "fa:16:3e:03:3f:d6", "network": {"id": "c199e1e0-d60c-4b9a-b659-b176db682d02", "bridge": "br-int", "label": "tempest-ServersTestJSON-850235590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee4b77329e74ec3aa9f50bc0b53ffdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad48a330-41", "ovs_interfaceid": "ad48a330-41a2-437b-92eb-66a7086d8380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.198330] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.227184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.227184] env[62820]: DEBUG nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1378.230433] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.883s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.231094] env[62820]: DEBUG nova.objects.instance [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lazy-loading 'resources' on Instance uuid f2658dfa-baed-4ff3-8c7e-733bbcf1916e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1378.344563] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695236, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.378883] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527972bb-4ca0-6667-0d7e-5e3c8ff54a3d, 'name': SearchDatastore_Task, 'duration_secs': 0.047789} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.379166] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.379424] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 58a26c98-cbf9-491f-8d2c-20281c3d7771/58a26c98-cbf9-491f-8d2c-20281c3d7771.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1378.379678] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-117db5c8-4596-4734-8363-62b2524cf5df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.386954] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1378.386954] env[62820]: value = "task-1695242" [ 1378.386954] env[62820]: _type = "Task" [ 1378.386954] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.395361] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.441770] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Received event network-changed-29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1378.441977] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Refreshing instance network info cache due to event network-changed-29363f02-2acb-4e52-8db8-f9743ec7fb99. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1378.442209] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquiring lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.442353] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquired lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.442514] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Refreshing network info cache for port 29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1378.449458] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695240, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.490838] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.619634] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Releasing lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.619989] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Instance network_info: |[{"id": "ad48a330-41a2-437b-92eb-66a7086d8380", "address": "fa:16:3e:03:3f:d6", "network": {"id": "c199e1e0-d60c-4b9a-b659-b176db682d02", "bridge": "br-int", "label": "tempest-ServersTestJSON-850235590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee4b77329e74ec3aa9f50bc0b53ffdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad48a330-41", "ovs_interfaceid": "ad48a330-41a2-437b-92eb-66a7086d8380", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1378.620427] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:3f:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad48a330-41a2-437b-92eb-66a7086d8380', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1378.628347] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Creating folder: Project (0ee4b77329e74ec3aa9f50bc0b53ffdb). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.629399] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1378.629624] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c04098a-9406-4004-b1c6-d65ba9edd638 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.631478] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3007a083-ae96-4b11-8193-8bf265c7f19f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.640044] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1378.640044] env[62820]: value = "task-1695244" [ 1378.640044] env[62820]: _type = "Task" [ 1378.640044] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.645405] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Created folder: Project (0ee4b77329e74ec3aa9f50bc0b53ffdb) in parent group-v353379. [ 1378.645646] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Creating folder: Instances. Parent ref: group-v353463. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.648498] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de49ad6a-d98a-4bab-9327-3cb9ff77e518 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.650012] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695244, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.657465] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Created folder: Instances in parent group-v353463. [ 1378.657698] env[62820]: DEBUG oslo.service.loopingcall [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.657888] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1378.658099] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28770f29-ad2f-44d6-b8ee-3f71e5ed0128 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.678784] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1378.678784] env[62820]: value = "task-1695246" [ 1378.678784] env[62820]: _type = "Task" [ 1378.678784] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.686467] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695246, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.734206] env[62820]: DEBUG nova.compute.utils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1378.738523] env[62820]: DEBUG nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Not allocating networking since 'none' was specified. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1378.844015] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695236, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.899974] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.941320] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695240, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.991421] env[62820]: DEBUG oslo_vmware.api [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.656817} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.991704] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.991893] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.992177] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.992257] env[62820]: INFO nova.compute.manager [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1378.992504] env[62820]: DEBUG oslo.service.loopingcall [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.992721] env[62820]: DEBUG nova.compute.manager [-] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1378.992820] env[62820]: DEBUG nova.network.neutron [-] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1379.151914] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695244, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.168084] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e44a2ab-3e51-4776-b6a0-14a41314a68c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.178709] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b96996-ee8c-4fb1-a38a-fac49773f6d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.193636] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695246, 'name': CreateVM_Task, 'duration_secs': 0.449136} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.220093] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1379.223787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.223965] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.224355] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1379.225371] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3320c527-6533-4c3a-bb3e-9ec8bac512d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.228455] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd238e2d-4dd3-4fea-935b-8d813087934f {{(pid=62820) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.234392] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1379.234392] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5200f771-0eee-0c2f-6ac6-c353c0bea989" [ 1379.234392] env[62820]: _type = "Task" [ 1379.234392] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.241527] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f36f77c-783e-4a3c-8aac-1f3a3d79101c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.246482] env[62820]: DEBUG nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1379.249849] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1379.250481] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1719e3c1-96c8-40a5-9b07-784f5a31862c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.257565] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5200f771-0eee-0c2f-6ac6-c353c0bea989, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.267161] env[62820]: DEBUG nova.compute.provider_tree [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.271499] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1379.271499] env[62820]: value = "task-1695247" [ 1379.271499] env[62820]: _type = "Task" [ 1379.271499] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.274650] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updated VIF entry in instance network info cache for port 29363f02-2acb-4e52-8db8-f9743ec7fb99. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1379.275229] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updating instance_info_cache with network_info: [{"id": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "address": "fa:16:3e:ef:6d:5b", "network": {"id": "637634a0-8c6d-4e06-945b-58f2e86d4edf", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2105723280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05ec4bd31e8941ee9fc67f16798a9227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29363f02-2a", "ovs_interfaceid": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.283636] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.346804] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695236, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.623812} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.347114] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 519c961c-557e-4796-88da-047c55d6be44/519c961c-557e-4796-88da-047c55d6be44.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1379.347325] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1379.347578] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36840c1b-5083-45c2-9157-cf69dfa5646b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.356554] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1379.356554] env[62820]: value = "task-1695248" [ 1379.356554] env[62820]: _type = "Task" [ 1379.356554] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.364485] env[62820]: DEBUG nova.compute.manager [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1379.364485] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504fdf56-90ef-461b-8a90-38d390ddc3ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.376369] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695248, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.381751] env[62820]: DEBUG nova.compute.manager [req-1c07cd51-2287-4b44-bd36-393ea04e46dd req-3f1638ea-d8b3-4802-8c34-c5947c229544 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Received event network-vif-deleted-cc74739f-914e-44f2-aa7c-dd0cef391791 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1379.381960] env[62820]: INFO nova.compute.manager [req-1c07cd51-2287-4b44-bd36-393ea04e46dd req-3f1638ea-d8b3-4802-8c34-c5947c229544 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Neutron deleted interface cc74739f-914e-44f2-aa7c-dd0cef391791; detaching it from the instance and deleting it from the info cache [ 1379.382176] env[62820]: DEBUG nova.network.neutron [req-1c07cd51-2287-4b44-bd36-393ea04e46dd req-3f1638ea-d8b3-4802-8c34-c5947c229544 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.397272] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695242, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.442114] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695240, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.655018] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695244, 'name': CreateSnapshot_Task, 'duration_secs': 0.92303} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.655018] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1379.655018] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ec2510-4610-4b9b-a697-461846076cca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.747124] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5200f771-0eee-0c2f-6ac6-c353c0bea989, 'name': SearchDatastore_Task, 'duration_secs': 0.060671} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.747533] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.747827] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.748088] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.748239] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.748417] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.748703] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d333fd9d-3548-4e62-b659-ae3df88dcdfb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.760297] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.760471] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1379.761493] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dcd3ea1-3186-4895-a858-ab0707bcbedb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.767200] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1379.767200] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c06ee0-5487-41ac-2f47-4c9bccadbcef" [ 1379.767200] env[62820]: _type = "Task" [ 1379.767200] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.771097] env[62820]: DEBUG nova.scheduler.client.report [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1379.782556] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Releasing lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.782798] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Received event network-vif-plugged-66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1379.782989] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquiring lock "58a26c98-cbf9-491f-8d2c-20281c3d7771-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.783204] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.783367] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.783543] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] No waiting events found dispatching network-vif-plugged-66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1379.783707] env[62820]: WARNING nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Received unexpected event network-vif-plugged-66676266-bbc8-4add-aeb0-77fc22873d87 for instance with vm_state building and task_state spawning. [ 1379.783870] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Received event network-changed-66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1379.784064] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Refreshing instance network info cache due to event network-changed-66676266-bbc8-4add-aeb0-77fc22873d87. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1379.784260] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquiring lock "refresh_cache-58a26c98-cbf9-491f-8d2c-20281c3d7771" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.784400] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquired lock "refresh_cache-58a26c98-cbf9-491f-8d2c-20281c3d7771" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.784557] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Refreshing network info cache for port 66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1379.785838] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c06ee0-5487-41ac-2f47-4c9bccadbcef, 'name': SearchDatastore_Task, 'duration_secs': 0.008031} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.790436] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695247, 'name': PowerOffVM_Task, 'duration_secs': 0.219116} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.790436] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2d75240-50b7-4e40-91ff-5d58ba36bac5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.792543] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1379.794033] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8d8d8a-b8f6-4ca1-85e7-482f0ff220e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.800213] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1379.800213] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a6fdc7-bef7-500e-3a5e-024d7ec7918c" [ 1379.800213] env[62820]: _type = "Task" [ 1379.800213] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.814786] env[62820]: DEBUG nova.network.neutron [-] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.819989] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0063f5-3235-43b3-ae33-c0af923316cc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.833705] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a6fdc7-bef7-500e-3a5e-024d7ec7918c, 'name': SearchDatastore_Task, 'duration_secs': 0.008736} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.833964] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.834227] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0eb62424-0ee6-4ff4-94c2-bb6a10861759/0eb62424-0ee6-4ff4-94c2-bb6a10861759.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1379.834468] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdd75076-e50f-4948-b50e-72b18ef18745 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.844042] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1379.844042] env[62820]: value = "task-1695249" [ 1379.844042] env[62820]: _type = "Task" [ 1379.844042] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.853675] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.855558] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1379.855935] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a11841f-5123-46a4-8e28-8af5c1440257 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.865661] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090446} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.866875] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1379.867227] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1379.867227] env[62820]: value = "task-1695250" [ 1379.867227] env[62820]: _type = "Task" [ 1379.867227] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.867900] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d70c3d-d73b-4959-9cb7-943057c2b7ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.878633] env[62820]: INFO nova.compute.manager [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] instance snapshotting [ 1379.889320] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1379.889544] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.889806] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.889968] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.890160] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.898954] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 519c961c-557e-4796-88da-047c55d6be44/519c961c-557e-4796-88da-047c55d6be44.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1379.899475] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-004cd8c4-99aa-4fbd-b870-195be67c5a9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.901253] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd5d53fb-da83-41cb-bdba-cfeeba25e687 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.905848] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a5f59f1-2334-475c-9f63-8161172aa834 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.920298] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82ce6dc-54a7-4338-b3cf-f95400a689de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.930504] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695242, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.018865} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.934848] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 58a26c98-cbf9-491f-8d2c-20281c3d7771/58a26c98-cbf9-491f-8d2c-20281c3d7771.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1379.935087] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1379.935496] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.935649] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1379.936430] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1379.936430] env[62820]: value = "task-1695251" [ 1379.936430] env[62820]: _type = "Task" [ 1379.936430] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.937013] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddfcf100-af5d-46c0-9639-1f14e5532666 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.941324] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d75e768-ae63-4c9b-9a27-0c443ec9310d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.954145] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ae920f8-1b1c-4d4c-9809-7fcc3a77208a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.977258] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf9c7d0-5ce5-4ea7-9853-5ca5e842b533 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.984265] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1379.984265] env[62820]: value = "task-1695252" [ 1379.984265] env[62820]: _type = "Task" [ 1379.984265] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.984265] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1379.984265] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527de168-6ef5-91d1-b202-f3e620e4cac1" [ 1379.984265] env[62820]: _type = "Task" [ 1379.984265] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.984265] env[62820]: DEBUG oslo_vmware.api [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695240, 'name': RemoveSnapshot_Task, 'duration_secs': 1.661304} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.001110] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1380.001386] env[62820]: INFO nova.compute.manager [None req-8c9f1100-9452-4cf6-9307-f355122cb363 tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Took 15.40 seconds to snapshot the instance on the hypervisor. 
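The entries before and after this point repeat the same oslo.vmware calling pattern: a vSphere *_Task method (CreateSnapshot_Task, CreateVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, SearchDatastore_Task, ...) is invoked through the API session, which immediately returns a Task reference, and that task is then polled until vCenter reports completion; the "Waiting for the task", "progress is N%", and "completed successfully" lines come from that polling loop in oslo_vmware/api.py. A minimal Python sketch of the pattern follows, assuming an already established oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine managed-object reference `vm_ref` obtained elsewhere (both are assumptions here); the helper name and argument values are purely illustrative, not Nova's actual driver code.

def snapshot_vm(session, vm_ref, name, description=""):
    # Illustrative sketch only, not Nova's implementation.
    # CreateSnapshot_Task returns a Task managed-object reference right away;
    # the snapshot work continues asynchronously on vCenter.
    task = session.invoke_api(
        session.vim, "CreateSnapshot_Task", vm_ref,
        name=name, description=description,
        memory=False, quiesce=False)
    # wait_for_task() polls the task state (producing the "progress is N%"
    # log lines seen above) and raises if the task ends in an error state.
    return session.wait_for_task(task)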
[ 1380.004145] env[62820]: DEBUG nova.compute.manager [req-1c07cd51-2287-4b44-bd36-393ea04e46dd req-3f1638ea-d8b3-4802-8c34-c5947c229544 service nova] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Detach interface failed, port_id=cc74739f-914e-44f2-aa7c-dd0cef391791, reason: Instance 93098210-ca91-41b4-9b12-96fa105a2ab3 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1380.016496] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.028685] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527de168-6ef5-91d1-b202-f3e620e4cac1, 'name': SearchDatastore_Task, 'duration_secs': 0.010376} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.031261] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695252, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.032385] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae78bbb4-b482-449b-ac3f-b8d692dd6960 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.038223] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1380.038223] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52042e49-4765-a811-29fc-4ab4fe1150df" [ 1380.038223] env[62820]: _type = "Task" [ 1380.038223] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.046917] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52042e49-4765-a811-29fc-4ab4fe1150df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.173934] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1380.174296] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0352bfc7-2ede-43c9-8452-1e406decd4b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.183854] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1380.183854] env[62820]: value = "task-1695253" [ 1380.183854] env[62820]: _type = "Task" [ 1380.183854] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.191903] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695253, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.256608] env[62820]: DEBUG nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1380.277923] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1380.278321] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1380.278656] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1380.278948] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1380.279233] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1380.279494] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1380.279848] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1380.280119] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1380.280335] env[62820]: DEBUG nova.virt.hardware [None 
req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1380.280532] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1380.280735] env[62820]: DEBUG nova.virt.hardware [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1380.281501] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.284007] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4d932e-c50a-4a79-8610-d32d9a25e4b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.286912] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.979s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.288512] env[62820]: INFO nova.compute.claims [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.298625] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ee43f9-e06a-4c40-8ee8-f6f1b46504dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.313193] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.319370] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Creating folder: Project (aec241a4e9ad4230a0ba9ba5de62e830). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.320591] env[62820]: INFO nova.scheduler.client.report [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Deleted allocations for instance f2658dfa-baed-4ff3-8c7e-733bbcf1916e [ 1380.322033] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccc3fa56-cba7-4274-8e72-f89a7d904f0e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.325915] env[62820]: INFO nova.compute.manager [-] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Took 1.33 seconds to deallocate network for instance. [ 1380.337890] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Created folder: Project (aec241a4e9ad4230a0ba9ba5de62e830) in parent group-v353379. [ 1380.337890] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Creating folder: Instances. Parent ref: group-v353468. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.338077] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f43c8a86-44e3-4b5e-9c73-96c42b421e74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.349218] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Created folder: Instances in parent group-v353468. [ 1380.349459] env[62820]: DEBUG oslo.service.loopingcall [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.350029] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.350247] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be399d3a-caec-478c-8601-4057eb6d6f64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.366926] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.371474] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.371474] env[62820]: value = "task-1695256" [ 1380.371474] env[62820]: _type = "Task" [ 1380.371474] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.382370] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695256, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.465860] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695251, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.517525] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081039} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.525823] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1380.525823] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48c44a1-6fa8-4b2d-a6b9-a7d6c0e1bbf7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.542144] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1380.551170] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 58a26c98-cbf9-491f-8d2c-20281c3d7771/58a26c98-cbf9-491f-8d2c-20281c3d7771.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1380.552467] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Updated VIF entry in instance network info cache for port 66676266-bbc8-4add-aeb0-77fc22873d87. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.553011] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Updating instance_info_cache with network_info: [{"id": "66676266-bbc8-4add-aeb0-77fc22873d87", "address": "fa:16:3e:59:ab:54", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66676266-bb", "ovs_interfaceid": "66676266-bbc8-4add-aeb0-77fc22873d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.554821] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1b318159-8166-4856-a364-4a80f2226a28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.560726] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddaccb52-8184-40c0-9edf-6c95ba492638 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.585622] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52042e49-4765-a811-29fc-4ab4fe1150df, 'name': SearchDatastore_Task, 'duration_secs': 0.008386} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.589436] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.591695] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
{{(pid=62820) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1380.591695] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1380.591695] env[62820]: value = "task-1695257" [ 1380.591695] env[62820]: _type = "Task" [ 1380.591695] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.591695] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1380.591695] env[62820]: value = "task-1695258" [ 1380.591695] env[62820]: _type = "Task" [ 1380.591695] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.591695] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d03eaed5-6528-4759-929c-6ba3e6448264 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.609505] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695257, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.615935] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1380.615935] env[62820]: value = "task-1695259" [ 1380.615935] env[62820]: _type = "Task" [ 1380.615935] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.616358] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.629964] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.695946] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695253, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.831066] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74c48d8e-d516-459e-a233-7bb0ad247da4 tempest-InstanceActionsTestJSON-1723434856 tempest-InstanceActionsTestJSON-1723434856-project-member] Lock "f2658dfa-baed-4ff3-8c7e-733bbcf1916e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.906s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.834323] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.855539] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.982215} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.855911] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0eb62424-0ee6-4ff4-94c2-bb6a10861759/0eb62424-0ee6-4ff4-94c2-bb6a10861759.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1380.856034] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1380.856528] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edc2aba4-d54b-400c-88af-60121cec484c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.862308] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1380.862308] env[62820]: value = "task-1695260" [ 1380.862308] env[62820]: _type = "Task" [ 1380.862308] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.870817] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695260, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.883855] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695256, 'name': CreateVM_Task, 'duration_secs': 0.48928} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.883855] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1380.883855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.883855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.883855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1380.883855] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aacf912-eee8-41bb-8c08-0c1c98eac76f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.889018] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1380.889018] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d02355-1a46-2688-ce69-fe66576831a5" [ 1380.889018] env[62820]: _type = "Task" [ 1380.889018] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.900411] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d02355-1a46-2688-ce69-fe66576831a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.967914] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695251, 'name': ReconfigVM_Task, 'duration_secs': 0.582946} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.969039] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 519c961c-557e-4796-88da-047c55d6be44/519c961c-557e-4796-88da-047c55d6be44.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.969512] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75bb4882-eea4-434a-af06-f3bbc85c218c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.978584] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1380.978584] env[62820]: value = "task-1695261" [ 1380.978584] env[62820]: _type = "Task" [ 1380.978584] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.990285] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695261, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.077642] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Releasing lock "refresh_cache-58a26c98-cbf9-491f-8d2c-20281c3d7771" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.078073] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Received event network-vif-plugged-ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1381.078447] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquiring lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.078525] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.078662] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.078871] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] No waiting events found dispatching network-vif-plugged-ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1381.079092] env[62820]: WARNING nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Received unexpected event network-vif-plugged-ad48a330-41a2-437b-92eb-66a7086d8380 for instance with vm_state building and task_state spawning. [ 1381.079268] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Received event network-changed-ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1381.079425] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Refreshing instance network info cache due to event network-changed-ad48a330-41a2-437b-92eb-66a7086d8380. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1381.080284] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquiring lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.080284] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquired lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.080284] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Refreshing network info cache for port ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1381.108986] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695257, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.113623] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695258, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.128976] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695259, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.200963] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695253, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.373746] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695260, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060941} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.374033] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1381.374686] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0813ae-a89b-4a5c-b9ae-b85ab90324f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.400289] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 0eb62424-0ee6-4ff4-94c2-bb6a10861759/0eb62424-0ee6-4ff4-94c2-bb6a10861759.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1381.406953] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5e5c1af-5aec-4169-b31f-1453907bcccf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.431014] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d02355-1a46-2688-ce69-fe66576831a5, 'name': SearchDatastore_Task, 'duration_secs': 0.011746} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.431334] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1381.431334] env[62820]: value = "task-1695262" [ 1381.431334] env[62820]: _type = "Task" [ 1381.431334] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.431884] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.432312] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1381.432587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.432913] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.432913] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.436408] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a4b9a20-4f74-4107-811d-1e5268ed2a28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.449527] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695262, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.452134] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.452134] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1381.454818] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67a13cbd-dd29-4583-942c-9fc02e7c8348 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.461564] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1381.461564] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528d6d1e-9177-22be-893d-783379e442fd" [ 1381.461564] env[62820]: _type = "Task" [ 1381.461564] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.469673] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528d6d1e-9177-22be-893d-783379e442fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.485361] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695261, 'name': Rename_Task, 'duration_secs': 0.159722} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.488424] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1381.489174] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a05ccc2-c895-4ecf-8057-57e4d9419fba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.494566] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1381.494566] env[62820]: value = "task-1695263" [ 1381.494566] env[62820]: _type = "Task" [ 1381.494566] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.506483] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695263, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.610402] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695258, 'name': ReconfigVM_Task, 'duration_secs': 0.713848} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.613537] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 58a26c98-cbf9-491f-8d2c-20281c3d7771/58a26c98-cbf9-491f-8d2c-20281c3d7771.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.614228] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695257, 'name': CreateSnapshot_Task, 'duration_secs': 0.855134} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.614839] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dadf771-03ee-46fd-9ebc-cde0206d61e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.616262] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1381.619565] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e11a678-15fa-4194-a960-6060d6c716b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.630019] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1381.630019] env[62820]: value = "task-1695264" [ 1381.630019] env[62820]: _type = "Task" [ 1381.630019] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.644976] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695259, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627891} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.645719] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
[ 1381.648035] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5ffe26-7cc7-4a35-a170-0d66b90dc993 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.653830] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695264, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.677701] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1381.682701] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09c65bd5-dd8e-455e-ac5d-d7f7c0659459 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.706212] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695253, 'name': CloneVM_Task, 'duration_secs': 1.330867} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.707444] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Created linked-clone VM from snapshot [ 1381.707765] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1381.707765] env[62820]: value = "task-1695265" [ 1381.707765] env[62820]: _type = "Task" [ 1381.707765] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.710833] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530f4f3b-01f3-4018-9c88-1a3c66fc592e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.723067] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Uploading image 73e7e1b6-32b9-4a2f-84d9-2ee4537bfc3d {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1381.731327] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695265, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.759381] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1381.759381] env[62820]: value = "vm-353467" [ 1381.759381] env[62820]: _type = "VirtualMachine" [ 1381.759381] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1381.759723] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e4bf8676-1b9b-41cc-8869-7c3a347e5887 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.766389] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease: (returnval){ [ 1381.766389] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5263d3fb-1dfc-0a93-313c-23a0165ee6fe" [ 1381.766389] env[62820]: _type = "HttpNfcLease" [ 1381.766389] env[62820]: } obtained for exporting VM: (result){ [ 1381.766389] env[62820]: value = "vm-353467" [ 1381.766389] env[62820]: _type = "VirtualMachine" [ 1381.766389] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1381.766646] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the lease: (returnval){ [ 1381.766646] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5263d3fb-1dfc-0a93-313c-23a0165ee6fe" [ 1381.766646] env[62820]: _type = "HttpNfcLease" [ 1381.766646] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1381.778875] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1381.778875] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5263d3fb-1dfc-0a93-313c-23a0165ee6fe" [ 1381.778875] env[62820]: _type = "HttpNfcLease" [ 1381.778875] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1381.778875] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1381.778875] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5263d3fb-1dfc-0a93-313c-23a0165ee6fe" [ 1381.778875] env[62820]: _type = "HttpNfcLease" [ 1381.778875] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1381.781077] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae34502d-2e30-4ddb-954d-ac8b478b1227 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.789081] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292751d-c397-3603-ab75-e2378f24ecf5/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1381.789278] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292751d-c397-3603-ab75-e2378f24ecf5/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1381.883555] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8fc9b86f-17a1-4586-b305-076decb8f45d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.900463] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Updated VIF entry in instance network info cache for port ad48a330-41a2-437b-92eb-66a7086d8380. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1381.901100] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Updating instance_info_cache with network_info: [{"id": "ad48a330-41a2-437b-92eb-66a7086d8380", "address": "fa:16:3e:03:3f:d6", "network": {"id": "c199e1e0-d60c-4b9a-b659-b176db682d02", "bridge": "br-int", "label": "tempest-ServersTestJSON-850235590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee4b77329e74ec3aa9f50bc0b53ffdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad48a330-41", "ovs_interfaceid": "ad48a330-41a2-437b-92eb-66a7086d8380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.927610] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674e57f7-f779-4434-87ed-cc3d80d8a24e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.936719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.936979] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.937203] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.937385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.937553] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.940069] env[62820]: INFO nova.compute.manager [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Terminating instance [ 1381.947063] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54178b31-2844-4c74-8596-4490cd5405ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.956492] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695262, 'name': ReconfigVM_Task, 'duration_secs': 0.363596} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.984022] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 0eb62424-0ee6-4ff4-94c2-bb6a10861759/0eb62424-0ee6-4ff4-94c2-bb6a10861759.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.984022] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-880e56bd-fc4d-44a6-991c-9f3b65a99164 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.988526] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3addc3f8-d59a-44b5-9bf3-2f3b706bb586 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.004145] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3476cec9-1a2f-484d-bf06-3e4548afd31d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.008137] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1382.008137] env[62820]: value = "task-1695267" [ 1382.008137] env[62820]: _type = "Task" [ 1382.008137] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.008640] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528d6d1e-9177-22be-893d-783379e442fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009927} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.012807] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-934d9697-1fa6-4857-831e-9b0dad695bc0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.025912] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695263, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.026429] env[62820]: DEBUG nova.compute.provider_tree [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1382.033441] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695267, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.036599] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1382.036599] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5255b11e-c42b-e4e3-8f61-ad745beb40c8" [ 1382.036599] env[62820]: _type = "Task" [ 1382.036599] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.044970] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5255b11e-c42b-e4e3-8f61-ad745beb40c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009341} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.046144] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.046144] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1382.046309] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0b8cea5-1a88-4b9f-ac20-10d036c376ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.053900] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1382.053900] env[62820]: value = "task-1695268" [ 1382.053900] env[62820]: _type = "Task" [ 1382.053900] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.064135] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.156463] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1382.156837] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695264, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.157126] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2bccc1d7-bd7d-4944-8270-008f27e4dd72 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.165376] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1382.165376] env[62820]: value = "task-1695269" [ 1382.165376] env[62820]: _type = "Task" [ 1382.165376] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.173495] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695269, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.221717] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695265, 'name': ReconfigVM_Task, 'duration_secs': 0.457325} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.222040] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Reconfigured VM instance instance-00000017 to attach disk [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1382.222923] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae0a0e7-310b-459a-9ae7-0beda5b565ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.248751] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad2bb3f1-26f1-4375-9085-3976dc45cad4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.264173] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1382.264173] env[62820]: value = "task-1695270" [ 1382.264173] env[62820]: _type = "Task" [ 1382.264173] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.274563] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695270, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.404726] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Releasing lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.405254] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Received event network-changed-5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1382.405579] env[62820]: DEBUG nova.compute.manager [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Refreshing instance network info cache due to event network-changed-5d1e82ae-c035-4664-9764-24afac8896b1. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1382.406566] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquiring lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.406787] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Acquired lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.407430] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Refreshing network info cache for port 5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1382.444630] env[62820]: DEBUG nova.compute.manager [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1382.444630] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1382.445275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993805fe-897f-44ff-be78-d2193978cb43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.455508] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1382.455858] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-739080c1-59b9-4476-a2e6-21211aa3b959 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.462660] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1382.462660] env[62820]: value = "task-1695271" [ 1382.462660] env[62820]: _type = "Task" [ 1382.462660] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.472194] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695271, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.506481] env[62820]: DEBUG oslo_vmware.api [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695263, 'name': PowerOnVM_Task, 'duration_secs': 0.52629} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.506870] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1382.507359] env[62820]: INFO nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Took 11.47 seconds to spawn the instance on the hypervisor. 
[ 1382.507784] env[62820]: DEBUG nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1382.508746] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5191c102-7297-4e66-acc3-a7c7f266b1db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.525306] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695267, 'name': Rename_Task, 'duration_secs': 0.241518} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.526606] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.526815] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-769ca7ed-07ad-4b4f-b4b5-91fc9075fe64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.538546] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1382.538546] env[62820]: value = "task-1695272" [ 1382.538546] env[62820]: _type = "Task" [ 1382.538546] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.548732] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695272, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.552876] env[62820]: ERROR nova.scheduler.client.report [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [req-dd12fd22-de6b-4863-90e0-eac96b7cc0a5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd12fd22-de6b-4863-90e0-eac96b7cc0a5"}]} [ 1382.568857] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695268, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.576016] env[62820]: DEBUG nova.scheduler.client.report [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1382.603032] env[62820]: DEBUG nova.scheduler.client.report [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1382.603032] env[62820]: DEBUG nova.compute.provider_tree [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1382.621027] env[62820]: DEBUG nova.scheduler.client.report [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1382.658080] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695264, 'name': Rename_Task, 'duration_secs': 0.800573} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.658080] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.658080] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2a0ef96-2745-4922-aee9-43b24ddd3f70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.660698] env[62820]: DEBUG nova.scheduler.client.report [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1382.677555] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1382.677555] env[62820]: value = "task-1695273" [ 1382.677555] env[62820]: _type = "Task" [ 1382.677555] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.689870] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695269, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.701715] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695273, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.779386] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695270, 'name': ReconfigVM_Task, 'duration_secs': 0.268026} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.779770] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.780086] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53bcb14f-7a54-4914-b096-b9ed6801dc5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.792971] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1382.792971] env[62820]: value = "task-1695274" [ 1382.792971] env[62820]: _type = "Task" [ 1382.792971] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.802014] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.976664] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695271, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.036234] env[62820]: INFO nova.compute.manager [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Took 40.37 seconds to build instance. [ 1383.052338] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695272, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.071608] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728166} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.073518] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1383.075045] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1383.075045] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a55e878-b1bb-4528-aeea-4c057d97b58f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.084687] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1383.084687] env[62820]: value = "task-1695275" [ 1383.084687] env[62820]: _type = "Task" [ 1383.084687] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.098010] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.184087] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695269, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.193658] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695273, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.229465] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0aca464-44bd-4af3-a69d-59e4fd7c82fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.239261] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2e4779-67c7-4d99-9317-70c9d1534d43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.279369] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8691ef02-7d15-4b53-af80-b49a626b740b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.283621] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updated VIF entry in instance network info cache for port 5d1e82ae-c035-4664-9764-24afac8896b1. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1383.284120] env[62820]: DEBUG nova.network.neutron [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updating instance_info_cache with network_info: [{"id": "5d1e82ae-c035-4664-9764-24afac8896b1", "address": "fa:16:3e:8b:75:20", "network": {"id": "6bdb14c5-5bf2-41e5-b7d6-56a2da43f416", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-781910607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3edacaf37e34169a73932db948fa6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e82ae-c0", "ovs_interfaceid": "5d1e82ae-c035-4664-9764-24afac8896b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.290941] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4dbce5-fd97-4e35-8e0b-07b9c958f5c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.312273] env[62820]: DEBUG nova.compute.provider_tree [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.318405] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695274, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.479421] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695271, 'name': PowerOffVM_Task, 'duration_secs': 0.574287} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.479708] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1383.479873] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1383.480172] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80bcf1db-4148-4aab-82f6-44f37681228f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.538634] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7b621e94-fdd9-4a11-a23b-37203b07f78a tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "519c961c-557e-4796-88da-047c55d6be44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.933s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.550945] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1383.551265] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1383.551494] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 
tempest-ImagesOneServerTestJSON-1080800536-project-member] Deleting the datastore file [datastore1] 846e8df9-b925-4d2e-a90e-4e774c35d0b4 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1383.552292] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f76c1eea-50d0-4b5e-901a-d181c3ee7dcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.557905] env[62820]: DEBUG oslo_vmware.api [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695272, 'name': PowerOnVM_Task, 'duration_secs': 0.789295} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.558025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.558271] env[62820]: INFO nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Took 7.42 seconds to spawn the instance on the hypervisor. [ 1383.558485] env[62820]: DEBUG nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1383.559540] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82836724-51bb-48d3-862e-8248158fc946 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.563610] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for the task: (returnval){ [ 1383.563610] env[62820]: value = "task-1695277" [ 1383.563610] env[62820]: _type = "Task" [ 1383.563610] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.578134] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.596915] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131811} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.596915] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1383.596915] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e44540c-cfbd-4438-afed-7a2a92c4302c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.618465] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.619940] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d6e42e3-d552-4ca8-87e5-938aea2062f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.642525] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1383.642525] env[62820]: value = "task-1695278" [ 1383.642525] env[62820]: _type = "Task" [ 1383.642525] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.653253] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695278, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.677597] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695269, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.692301] env[62820]: DEBUG oslo_vmware.api [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695273, 'name': PowerOnVM_Task, 'duration_secs': 0.54368} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.692682] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.692846] env[62820]: INFO nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Took 10.09 seconds to spawn the instance on the hypervisor. [ 1383.693166] env[62820]: DEBUG nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1383.693866] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95efc5f-4189-43df-90bb-915873dd09ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.788301] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3be3b28-7390-44a6-b877-6b6010567398 req-505b95d0-fd31-4a5b-8341-828b77ac2c2f service nova] Releasing lock "refresh_cache-4e4668ed-801a-4105-8b9e-cf37be91c8b8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.806974] env[62820]: DEBUG nova.compute.manager [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Received event network-changed-29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1383.807330] env[62820]: DEBUG nova.compute.manager [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Refreshing instance network info cache due to event network-changed-29363f02-2acb-4e52-8db8-f9743ec7fb99. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1383.807511] env[62820]: DEBUG oslo_concurrency.lockutils [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] Acquiring lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.807650] env[62820]: DEBUG oslo_concurrency.lockutils [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] Acquired lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.807802] env[62820]: DEBUG nova.network.neutron [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Refreshing network info cache for port 29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.812552] env[62820]: DEBUG oslo_vmware.api [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695274, 'name': PowerOnVM_Task, 'duration_secs': 0.571341} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.813034] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.815584] env[62820]: DEBUG nova.compute.manager [None req-e885756e-8f24-4a76-bf78-2db78372b6c5 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1383.816391] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b9049e-148d-419e-813f-60849b258d8d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.856928] env[62820]: DEBUG nova.scheduler.client.report [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1383.856928] env[62820]: DEBUG nova.compute.provider_tree [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating resource provider 
8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 49 to 50 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1383.856928] env[62820]: DEBUG nova.compute.provider_tree [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1384.042037] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1384.087884] env[62820]: DEBUG oslo_vmware.api [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Task: {'id': task-1695277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395666} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.090476] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1384.090758] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1384.091326] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1384.091445] env[62820]: INFO nova.compute.manager [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1384.095025] env[62820]: DEBUG oslo.service.loopingcall [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1384.095025] env[62820]: INFO nova.compute.manager [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Took 38.67 seconds to build instance. [ 1384.095025] env[62820]: DEBUG nova.compute.manager [-] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1384.095025] env[62820]: DEBUG nova.network.neutron [-] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1384.158102] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695278, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.175181] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695269, 'name': CloneVM_Task, 'duration_secs': 1.680637} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.175496] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Created linked-clone VM from snapshot [ 1384.176265] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceea7b22-0800-4f85-befc-ed17a6250a4c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.186593] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Uploading image a6035f84-247a-4ce7-b343-7924ac889d33 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1384.215513] env[62820]: INFO nova.compute.manager [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Took 40.29 seconds to build instance. [ 1384.220658] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1384.220658] env[62820]: value = "vm-353472" [ 1384.220658] env[62820]: _type = "VirtualMachine" [ 1384.220658] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1384.220957] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-34ef859a-f705-4fb9-80e7-3959074de894 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.228210] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lease: (returnval){ [ 1384.228210] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd0cdb-a538-c6f5-13ee-b6d38e30f08b" [ 1384.228210] env[62820]: _type = "HttpNfcLease" [ 1384.228210] env[62820]: } obtained for exporting VM: (result){ [ 1384.228210] env[62820]: value = "vm-353472" [ 1384.228210] env[62820]: _type = "VirtualMachine" [ 1384.228210] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1384.228737] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the lease: (returnval){ [ 1384.228737] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd0cdb-a538-c6f5-13ee-b6d38e30f08b" [ 1384.228737] env[62820]: _type = "HttpNfcLease" [ 1384.228737] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1384.237482] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1384.237482] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd0cdb-a538-c6f5-13ee-b6d38e30f08b" [ 1384.237482] env[62820]: _type = "HttpNfcLease" [ 1384.237482] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1384.362887] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.076s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.363604] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1384.369274] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.704s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.371297] env[62820]: INFO nova.compute.claims [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1384.585983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.597153] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aeb53629-c107-42a4-b618-4dfb2e87366b tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.418s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.624968] env[62820]: DEBUG nova.network.neutron [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updated VIF entry in instance network info cache for port 29363f02-2acb-4e52-8db8-f9743ec7fb99. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.625087] env[62820]: DEBUG nova.network.neutron [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updating instance_info_cache with network_info: [{"id": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "address": "fa:16:3e:ef:6d:5b", "network": {"id": "637634a0-8c6d-4e06-945b-58f2e86d4edf", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2105723280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05ec4bd31e8941ee9fc67f16798a9227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29363f02-2a", "ovs_interfaceid": "29363f02-2acb-4e52-8db8-f9743ec7fb99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.663692] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695278, 'name': ReconfigVM_Task, 'duration_secs': 0.624625} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.663692] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.663692] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c1d6d48-41da-4048-9f76-0e0aeff7a540 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.671275] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1384.671275] env[62820]: value = "task-1695280" [ 1384.671275] env[62820]: _type = "Task" [ 1384.671275] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.681792] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695280, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.719178] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b9fb7f34-a623-498f-b147-7edfaf296f97 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.608s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.738620] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1384.738620] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd0cdb-a538-c6f5-13ee-b6d38e30f08b" [ 1384.738620] env[62820]: _type = "HttpNfcLease" [ 1384.738620] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1384.740157] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1384.740157] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd0cdb-a538-c6f5-13ee-b6d38e30f08b" [ 1384.740157] env[62820]: _type = "HttpNfcLease" [ 1384.740157] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1384.740157] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6022bb2a-c788-42be-b923-eab2dc4d90d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.749024] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a4ef63-3bd3-7eb9-58d9-1a674e59965b/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1384.749239] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a4ef63-3bd3-7eb9-58d9-1a674e59965b/disk-0.vmdk for reading. 
{{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1384.752471] env[62820]: DEBUG nova.compute.manager [req-cebcf7a3-d098-42e9-9595-089d09df9f50 req-39f4efed-8c9d-4065-89ff-7a951e94ac94 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Received event network-vif-deleted-712e639f-2aff-4915-9285-ea3d67b8e072 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1384.752665] env[62820]: INFO nova.compute.manager [req-cebcf7a3-d098-42e9-9595-089d09df9f50 req-39f4efed-8c9d-4065-89ff-7a951e94ac94 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Neutron deleted interface 712e639f-2aff-4915-9285-ea3d67b8e072; detaching it from the instance and deleting it from the info cache [ 1384.752848] env[62820]: DEBUG nova.network.neutron [req-cebcf7a3-d098-42e9-9595-089d09df9f50 req-39f4efed-8c9d-4065-89ff-7a951e94ac94 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.865048] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c0cb0f71-19ed-4220-9771-92b9863272c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.878533] env[62820]: DEBUG nova.compute.utils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1384.883026] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1384.883026] env[62820]: DEBUG nova.network.neutron [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1385.105825] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1385.128432] env[62820]: DEBUG oslo_concurrency.lockutils [req-7ed9d034-2178-4fe4-bdaa-d96d2bc5be36 req-99f97704-1dc3-47f9-9d7b-f5ffc211ae46 service nova] Releasing lock "refresh_cache-519c961c-557e-4796-88da-047c55d6be44" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.135602] env[62820]: DEBUG nova.policy [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81a169ac4144a5bbc0a4e3a077cb4a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65abf73e789b48d3ba24e2660d7c0341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1385.177244] env[62820]: DEBUG nova.network.neutron [-] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.186744] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695280, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.223641] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1385.314795] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5199259-faa2-441d-a9ca-ebdf4a0e2121 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.330564] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96efc8d-b5e0-4a97-992b-453e42c31a18 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.373664] env[62820]: DEBUG nova.compute.manager [req-cebcf7a3-d098-42e9-9595-089d09df9f50 req-39f4efed-8c9d-4065-89ff-7a951e94ac94 service nova] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Detach interface failed, port_id=712e639f-2aff-4915-9285-ea3d67b8e072, reason: Instance 846e8df9-b925-4d2e-a90e-4e774c35d0b4 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1385.387215] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1385.578024] env[62820]: DEBUG nova.network.neutron [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Successfully created port: e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1385.628896] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.691316] env[62820]: INFO nova.compute.manager [-] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Took 1.60 seconds to deallocate network for instance. [ 1385.716537] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695280, 'name': Rename_Task, 'duration_secs': 0.91137} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.717115] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1385.717466] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d210cf4-c1ef-4663-a691-ccda23d8d9bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.726397] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1385.726397] env[62820]: value = "task-1695281" [ 1385.726397] env[62820]: _type = "Task" [ 1385.726397] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.744671] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "706d42cd-53d9-4976-bc67-98816a40fff4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.745276] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "706d42cd-53d9-4976-bc67-98816a40fff4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.750037] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695281, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.765680] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.979635] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb87904-3120-4706-89fc-85e7b80d9cc3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.989822] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60a2ce0-d96f-45fe-9e3c-8d2539a60ac9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.023639] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af022e4-f8dd-46a3-9808-06cbf5ef378a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.032048] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec1dd56-425c-44f8-84d0-d526efb62f91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.048619] env[62820]: DEBUG nova.compute.provider_tree [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.204460] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.237704] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695281, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.408307] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1386.552134] env[62820]: DEBUG nova.scheduler.client.report [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1386.739710] env[62820]: DEBUG oslo_vmware.api [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695281, 'name': PowerOnVM_Task, 'duration_secs': 0.625712} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.740174] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1386.740489] env[62820]: INFO nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Took 6.48 seconds to spawn the instance on the hypervisor. 
[ 1386.740761] env[62820]: DEBUG nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1386.741915] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e4a4a3-ad7d-4046-b056-7175fe6669bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.058637] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.059193] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1387.061769] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.431s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.061989] env[62820]: DEBUG nova.objects.instance [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lazy-loading 'resources' on Instance uuid 42d00bd3-71fa-4c26-a544-489326163d88 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1387.189899] env[62820]: DEBUG nova.network.neutron [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Successfully updated port: e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1387.265975] env[62820]: INFO nova.compute.manager [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Took 36.81 seconds to build instance. [ 1387.564939] env[62820]: DEBUG nova.compute.utils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1387.569318] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1387.569531] env[62820]: DEBUG nova.network.neutron [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1387.628351] env[62820]: DEBUG nova.policy [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48284dfde04d4d7d8d1ceb9a3204121b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03b3c8eaed13452eb00e8d97383df642', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1387.693173] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.693427] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.693645] env[62820]: DEBUG nova.network.neutron [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.768613] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6f6dd6ff-1f19-4e10-a435-0a5d81a17a21 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.759s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.915324] env[62820]: DEBUG nova.network.neutron [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Successfully created port: 95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1387.995179] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abbe658-4148-4e48-bbdf-a145b0fad76a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.003932] env[62820]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a9718c-b829-4180-8b83-0d091195935a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.035518] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d655d85c-0a3c-4439-b45c-458982bf3e62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.044229] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63a45db-4383-416a-b2da-784c0e8359a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.058123] env[62820]: DEBUG nova.compute.provider_tree [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.070192] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1388.225793] env[62820]: DEBUG nova.network.neutron [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1388.271431] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1388.368860] env[62820]: DEBUG nova.network.neutron [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Updating instance_info_cache with network_info: [{"id": "e8ee995a-d8ee-4b9b-bb95-10e37f0a6313", "address": "fa:16:3e:87:9d:0a", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8ee995a-d8", "ovs_interfaceid": "e8ee995a-d8ee-4b9b-bb95-10e37f0a6313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.561926] env[62820]: DEBUG nova.scheduler.client.report [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1388.790346] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.871640] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.871961] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Instance network_info: |[{"id": "e8ee995a-d8ee-4b9b-bb95-10e37f0a6313", "address": "fa:16:3e:87:9d:0a", "network": 
{"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8ee995a-d8", "ovs_interfaceid": "e8ee995a-d8ee-4b9b-bb95-10e37f0a6313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1389.067339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.069915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 24.782s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.080148] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1389.089163] env[62820]: INFO nova.scheduler.client.report [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Deleted allocations for instance 42d00bd3-71fa-4c26-a544-489326163d88 [ 1389.449189] env[62820]: DEBUG nova.network.neutron [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Successfully updated port: 95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1389.597121] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90ad54b7-23a8-4fd3-b1a4-348f36f37742 tempest-ServerDiagnosticsTest-529864616 tempest-ServerDiagnosticsTest-529864616-project-member] Lock "42d00bd3-71fa-4c26-a544-489326163d88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.690s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.953320] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "refresh_cache-9068670d-f323-4180-92f9-f19737e955e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.953858] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired lock "refresh_cache-9068670d-f323-4180-92f9-f19737e955e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.953985] env[62820]: DEBUG nova.network.neutron [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1390.085966] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 as it has an incoming, in-progress migration 17065b4d-ea93-42e5-aca0-e553248f0e35. Migration status is error {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1390.088383] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=62820) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1390.114049] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114049] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114049] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 90ea0c16-739a-4132-ac36-e154a846b9c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114332] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 2f917745-28ef-4dfe-8c09-45c15a80145d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114332] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9910a0ea-5ce0-41e9-b449-da729a4c3223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114441] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9287b8eb-487d-4f51-9e7c-90c016a1c8e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114532] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 846e8df9-b925-4d2e-a90e-4e774c35d0b4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1390.114654] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114771] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a06d736c-a704-46e8-a6f7-85d8be40804f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.114904] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 93098210-ca91-41b4-9b12-96fa105a2ab3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1390.115045] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15e95a20-2729-46c6-a613-32aa353ed329 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1390.115166] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 6176f083-b61a-40d6-90a0-680b628a1e08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.115336] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.115540] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance cc2b0ed5-b711-487d-8bfc-ee2745c9ef89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.115674] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 519c961c-557e-4796-88da-047c55d6be44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.115788] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 58a26c98-cbf9-491f-8d2c-20281c3d7771 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.115903] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0eb62424-0ee6-4ff4-94c2-bb6a10861759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.116030] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 766dd26e-3866-4ef3-bd87-b81e5f6bc718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.116158] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance fdc57b8b-a6ab-4e6d-9db0-4054b022aeec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.116326] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9068670d-f323-4180-92f9-f19737e955e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.495967] env[62820]: DEBUG nova.network.neutron [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1390.619167] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance d040f935-566b-4bbe-b9f6-379fd1dc1a91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1390.642670] env[62820]: DEBUG nova.network.neutron [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Updating instance_info_cache with network_info: [{"id": "95cd753f-d804-4914-8266-24e2348bfd8f", "address": "fa:16:3e:82:40:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95cd753f-d8", "ovs_interfaceid": "95cd753f-d804-4914-8266-24e2348bfd8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.122408] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.145787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Releasing lock "refresh_cache-9068670d-f323-4180-92f9-f19737e955e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.146241] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Instance network_info: |[{"id": "95cd753f-d804-4914-8266-24e2348bfd8f", "address": "fa:16:3e:82:40:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95cd753f-d8", "ovs_interfaceid": "95cd753f-d804-4914-8266-24e2348bfd8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1391.529152] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1391.529934] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1391.529934] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1391.529934] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1391.529934] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1391.530221] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1391.530295] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1391.530452] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1391.530610] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1391.530775] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1391.530953] env[62820]: DEBUG nova.virt.hardware [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1391.533286] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d30ec6e-8921-4895-b5f8-c437861b790d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.542527] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803a8b0b-fb3c-44bd-a268-7c12997f65e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.548515] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 
tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 
tempest-DeleteServersAdminTestJSON-412807917-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1391.551148] env[62820]: DEBUG nova.virt.hardware [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1391.552086] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f589a0-5771-4a24-bf5b-b8b958aff7e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.567436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16679b8c-e860-4d33-bf93-7232dfd275e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.571540] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:9d:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8ee995a-d8ee-4b9b-bb95-10e37f0a6313', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1391.578880] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating folder: Project (65abf73e789b48d3ba24e2660d7c0341). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1391.581890] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292751d-c397-3603-ab75-e2378f24ecf5/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1391.581890] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e9eefb3-a847-4508-8df2-e25f611a2089 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.583681] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b017f1-2cfe-446b-81e1-510ba9f53e47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.590867] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292751d-c397-3603-ab75-e2378f24ecf5/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1391.591051] env[62820]: ERROR oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292751d-c397-3603-ab75-e2378f24ecf5/disk-0.vmdk due to incomplete transfer. [ 1391.599079] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-74e6f45e-5572-45dd-82b3-43176b0f4e30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.601069] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:40:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95cd753f-d804-4914-8266-24e2348bfd8f', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1391.608350] env[62820]: DEBUG oslo.service.loopingcall [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1391.609829] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1391.610111] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created folder: Project (65abf73e789b48d3ba24e2660d7c0341) in parent group-v353379. [ 1391.610296] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating folder: Instances. Parent ref: group-v353473. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1391.610966] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edd80403-2e4f-4d02-af99-725829819782 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.625135] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0ac485b-5f1e-4178-8e74-c777076db033 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.628497] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.630051] env[62820]: DEBUG oslo_vmware.rw_handles [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292751d-c397-3603-ab75-e2378f24ecf5/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1391.630051] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Uploaded image 73e7e1b6-32b9-4a2f-84d9-2ee4537bfc3d to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1391.634080] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1391.634080] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ef0fc71d-0f8e-4c74-83d8-27d784000e9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.640044] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1391.640044] env[62820]: value = "task-1695283" [ 1391.640044] env[62820]: _type = "Task" [ 1391.640044] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.640044] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1391.640044] env[62820]: value = "task-1695285" [ 1391.640044] env[62820]: _type = "Task" [ 1391.640044] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.649587] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created folder: Instances in parent group-v353473. 
[ 1391.649834] env[62820]: DEBUG oslo.service.loopingcall [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1391.650316] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1391.650831] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3402f090-0e32-4b47-9e67-5c8c180a204f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.671394] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695283, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.671394] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695285, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.677683] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1391.677683] env[62820]: value = "task-1695286" [ 1391.677683] env[62820]: _type = "Task" [ 1391.677683] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.685851] env[62820]: DEBUG nova.compute.manager [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1391.686166] env[62820]: DEBUG nova.compute.manager [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing instance network info cache due to event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1391.686509] env[62820]: DEBUG oslo_concurrency.lockutils [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.686739] env[62820]: DEBUG oslo_concurrency.lockutils [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.687742] env[62820]: DEBUG nova.network.neutron [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1391.696525] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695286, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.135374] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 06fb6034-e010-49bd-9e5e-7699a43dd5a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1392.153762] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695285, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.158199] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695283, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.160655] env[62820]: DEBUG nova.compute.manager [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Received event network-changed-ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1392.160837] env[62820]: DEBUG nova.compute.manager [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Refreshing instance network info cache due to event network-changed-ad48a330-41a2-437b-92eb-66a7086d8380. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1392.161077] env[62820]: DEBUG oslo_concurrency.lockutils [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] Acquiring lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.161312] env[62820]: DEBUG oslo_concurrency.lockutils [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] Acquired lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.161364] env[62820]: DEBUG nova.network.neutron [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Refreshing network info cache for port ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.187876] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695286, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.403248] env[62820]: DEBUG nova.network.neutron [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updated VIF entry in instance network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1392.404121] env[62820]: DEBUG nova.network.neutron [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.644250] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance aa98dbb0-5ff7-4da5-a365-2b55a8bd2216 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1392.644250] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1392.661637] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695283, 'name': CreateVM_Task, 'duration_secs': 0.827006} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.665876] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1392.668208] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695285, 'name': Destroy_Task, 'duration_secs': 0.853963} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.668846] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.669040] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.669375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1392.670913] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Destroyed the VM [ 1392.670913] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1392.670913] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-326121d4-41cc-4a1b-9d56-4bca086d8fc2 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.673973] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2b2aaf0f-94dd-41ac-b787-1ae0b3135dc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.683572] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1392.683572] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5227bee0-9742-9245-ae72-f96e423ca2ad" [ 1392.683572] env[62820]: _type = "Task" [ 1392.683572] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.685667] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1392.685667] env[62820]: value = "task-1695287" [ 1392.685667] env[62820]: _type = "Task" [ 1392.685667] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.699084] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695286, 'name': CreateVM_Task, 'duration_secs': 0.722} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.699810] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1392.700741] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.704935] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695287, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.711248] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5227bee0-9742-9245-ae72-f96e423ca2ad, 'name': SearchDatastore_Task, 'duration_secs': 0.021287} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.711578] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.711844] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.712106] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.712253] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.712428] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.713452] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.713522] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1392.714039] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83c27d4b-f052-4170-bbde-c93d98e097d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.716289] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e34b72f2-2197-4a22-8074-0b7a1dbc077e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.722158] env[62820]: 
DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1392.722158] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520d167e-6918-b4a0-d942-ba6535d05b07" [ 1392.722158] env[62820]: _type = "Task" [ 1392.722158] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.727735] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.727929] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.728982] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-029b454a-6bf4-4067-ada3-fa45d8dbb7c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.734852] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520d167e-6918-b4a0-d942-ba6535d05b07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.738380] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1392.738380] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5298bcc7-202a-62bd-e1f0-08f941bc2fef" [ 1392.738380] env[62820]: _type = "Task" [ 1392.738380] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.747894] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5298bcc7-202a-62bd-e1f0-08f941bc2fef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.910723] env[62820]: DEBUG oslo_concurrency.lockutils [req-70ec88c7-75ae-4966-9796-da655a3e47c0 req-acde9ea8-764b-43b6-8ecc-825e6aa690fa service nova] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.911361] env[62820]: INFO nova.compute.manager [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Rebuilding instance [ 1392.963345] env[62820]: DEBUG nova.compute.manager [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1392.964333] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49e4a8d-7798-4632-b5e3-58e33c426b8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.968186] env[62820]: DEBUG nova.network.neutron [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Updated VIF entry in instance network info cache for port ad48a330-41a2-437b-92eb-66a7086d8380. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1392.968578] env[62820]: DEBUG nova.network.neutron [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Updating instance_info_cache with network_info: [{"id": "ad48a330-41a2-437b-92eb-66a7086d8380", "address": "fa:16:3e:03:3f:d6", "network": {"id": "c199e1e0-d60c-4b9a-b659-b176db682d02", "bridge": "br-int", "label": "tempest-ServersTestJSON-850235590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee4b77329e74ec3aa9f50bc0b53ffdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad48a330-41", "ovs_interfaceid": "ad48a330-41a2-437b-92eb-66a7086d8380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.150600] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance ab21fd61-3a44-42fa-92be-51214b0a9a1e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but 
the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1393.202729] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695287, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.233351] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520d167e-6918-b4a0-d942-ba6535d05b07, 'name': SearchDatastore_Task, 'duration_secs': 0.012995} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.233567] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.233824] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1393.234034] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.248168] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5298bcc7-202a-62bd-e1f0-08f941bc2fef, 'name': SearchDatastore_Task, 'duration_secs': 0.011941} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.249074] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18bd91dc-bdfc-4af4-b2f7-d14cf4af6f9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.255275] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1393.255275] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd5af2-fa8b-aba3-7b81-8c9711877863" [ 1393.255275] env[62820]: _type = "Task" [ 1393.255275] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.258959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "6176f083-b61a-40d6-90a0-680b628a1e08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.259216] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.259415] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "6176f083-b61a-40d6-90a0-680b628a1e08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.259593] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.259754] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.261946] env[62820]: INFO nova.compute.manager [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Terminating instance [ 1393.266359] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bd5af2-fa8b-aba3-7b81-8c9711877863, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.471853] env[62820]: DEBUG oslo_concurrency.lockutils [req-f41af174-57ef-4ced-be19-4e107fdb23e0 req-2ad0d935-2d53-4259-89e9-f8c5c47cb85e service nova] Releasing lock "refresh_cache-0eb62424-0ee6-4ff4-94c2-bb6a10861759" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.655503] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 31639194-b0c4-4eb9-a6f4-e61b067c807f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1393.704396] env[62820]: DEBUG oslo_vmware.api [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695287, 'name': RemoveSnapshot_Task, 'duration_secs': 0.637921} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.705400] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1393.705400] env[62820]: INFO nova.compute.manager [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Took 15.61 seconds to snapshot the instance on the hypervisor. [ 1393.767805] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bd5af2-fa8b-aba3-7b81-8c9711877863, 'name': SearchDatastore_Task, 'duration_secs': 0.01072} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.768104] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.768366] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9068670d-f323-4180-92f9-f19737e955e2/9068670d-f323-4180-92f9-f19737e955e2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.768987] env[62820]: DEBUG nova.compute.manager [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1393.769197] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.769515] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.769651] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1393.769860] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4c944d8-54d2-49aa-b7ed-55ee109a5887 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.772661] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc1a553-ae53-4e3c-a4f4-39d702d096d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.775406] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03e04533-15ab-42af-85dc-bdeba9f7941f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.783076] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 
tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.784701] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7c64bdf-89ad-4af9-a367-51a251f7fad3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.786627] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1393.786627] env[62820]: value = "task-1695288" [ 1393.786627] env[62820]: _type = "Task" [ 1393.786627] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.788877] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1393.788877] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1393.793559] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f077ce6-0d18-43cb-97b1-0c9fdc659c0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.803850] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1393.803850] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d9a1b6-1fe5-45a7-8c23-162522efc23e" [ 1393.803850] env[62820]: _type = "Task" [ 1393.803850] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.804392] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695288, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.816613] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d9a1b6-1fe5-45a7-8c23-162522efc23e, 'name': SearchDatastore_Task, 'duration_secs': 0.008995} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.816613] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db3139c4-9a06-432e-b713-6a02c30835d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.820853] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1393.820853] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5229c04d-1361-88fa-4510-84a2c200f0e4" [ 1393.820853] env[62820]: _type = "Task" [ 1393.820853] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.829413] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5229c04d-1361-88fa-4510-84a2c200f0e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.867692] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.868316] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.868621] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleting the datastore file [datastore1] 6176f083-b61a-40d6-90a0-680b628a1e08 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.869974] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-422786cf-9b3f-42ec-9bfc-c22bcf886779 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.875714] env[62820]: DEBUG oslo_vmware.api [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1393.875714] env[62820]: value = "task-1695290" [ 1393.875714] env[62820]: _type = "Task" [ 1393.875714] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.886306] env[62820]: DEBUG oslo_vmware.api [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.980715] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.981710] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef87e258-bf7a-4531-874c-682fc73ba83a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.991497] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1393.991497] env[62820]: value = "task-1695291" [ 1393.991497] env[62820]: _type = "Task" [ 1393.991497] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.006467] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.161671] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 262d0714-d7d7-443c-9927-ef03ba9f230e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1394.185111] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "f186854d-3f0a-4512-83b9-2c946247ccbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.185492] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.212583] env[62820]: DEBUG nova.compute.manager [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Instance disappeared during snapshot {{(pid=62820) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1394.223304] env[62820]: DEBUG nova.compute.manager [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1394.223561] env[62820]: DEBUG nova.compute.manager [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing instance network info cache due to event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1394.223738] env[62820]: DEBUG oslo_concurrency.lockutils [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.223783] env[62820]: DEBUG oslo_concurrency.lockutils [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.223923] env[62820]: DEBUG nova.network.neutron [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1394.236023] env[62820]: DEBUG nova.compute.manager [None req-6b3ad66f-8ed4-4b6b-ad02-4a29d9a48da9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image not found during clean up 73e7e1b6-32b9-4a2f-84d9-2ee4537bfc3d {{(pid=62820) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1394.305692] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695288, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.332497] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5229c04d-1361-88fa-4510-84a2c200f0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009845} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.332767] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.333058] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] fdc57b8b-a6ab-4e6d-9db0-4054b022aeec/fdc57b8b-a6ab-4e6d-9db0-4054b022aeec.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1394.333325] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-405d1529-407f-4440-bb34-e30ec2110b15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.340610] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1394.340610] env[62820]: value = "task-1695292" [ 1394.340610] env[62820]: _type = "Task" [ 1394.340610] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.349447] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.385960] env[62820]: DEBUG oslo_vmware.api [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.469324} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.386256] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.386459] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.386646] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.386822] env[62820]: INFO nova.compute.manager [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1394.387079] env[62820]: DEBUG oslo.service.loopingcall [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.387575] env[62820]: DEBUG nova.compute.manager [-] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1394.387709] env[62820]: DEBUG nova.network.neutron [-] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.504831] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695291, 'name': PowerOffVM_Task, 'duration_secs': 0.199042} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.505229] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1394.505567] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1394.506456] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5778c055-991a-4d52-be0e-84d68875c119 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.517290] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1394.518117] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef16240f-64d5-424b-bb1e-71f5bf1c9d25 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.546284] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1394.546284] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1394.546284] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Deleting the datastore file [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1394.546284] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94324388-60c5-4e7b-b4fe-e0c52c3fdf74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.555531] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1394.555531] env[62820]: value = "task-1695294" [ 1394.555531] env[62820]: _type = "Task" [ 1394.555531] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.567392] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695294, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.666917] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 706d42cd-53d9-4976-bc67-98816a40fff4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1394.667366] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1394.668055] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3968MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1394.702266] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Received event network-vif-plugged-e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1394.706041] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquiring lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.706041] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.706041] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.706041] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] No waiting events found dispatching network-vif-plugged-e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1394.706041] env[62820]: WARNING nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Received unexpected event network-vif-plugged-e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 for instance with vm_state building and task_state spawning. [ 1394.706041] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Received event network-changed-e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1394.706041] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Refreshing instance network info cache due to event network-changed-e8ee995a-d8ee-4b9b-bb95-10e37f0a6313. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1394.706041] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquiring lock "refresh_cache-fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.706041] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquired lock "refresh_cache-fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.706041] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Refreshing network info cache for port e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1394.800024] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695288, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547996} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.802571] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9068670d-f323-4180-92f9-f19737e955e2/9068670d-f323-4180-92f9-f19737e955e2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.802841] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.803673] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6024f31-ee38-463d-9721-77d1e5e0fdbd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.812666] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1394.812666] env[62820]: value = "task-1695295" [ 1394.812666] env[62820]: _type = "Task" [ 1394.812666] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.824534] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695295, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.851434] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457749} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.851756] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] fdc57b8b-a6ab-4e6d-9db0-4054b022aeec/fdc57b8b-a6ab-4e6d-9db0-4054b022aeec.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.852010] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.852342] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b19de787-459b-4a2f-b737-a0aae535e6bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.860356] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1394.860356] env[62820]: value = "task-1695296" [ 1394.860356] env[62820]: _type = "Task" [ 1394.860356] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.872213] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695296, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.069259] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294257} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.069575] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1395.073347] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1395.073459] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1395.131098] env[62820]: DEBUG nova.network.neutron [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updated VIF entry in instance network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1395.131451] env[62820]: DEBUG nova.network.neutron [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.196844] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4088e93f-5548-4990-bad1-f690604e26ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.204721] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80286a5b-b2c6-44c2-8da5-c74a4e935971 {{(pid=62820) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.238999] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe28e37e-ef7f-485b-84fe-f8e00d2fa7b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.241620] env[62820]: DEBUG nova.network.neutron [-] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.248125] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a43a26-fcc9-4f4a-af94-82d53517e2d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.267022] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.320880] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695295, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06797} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.321171] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1395.321959] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ff99ea-3d3b-4c00-98a5-d6f049410702 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.345753] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 9068670d-f323-4180-92f9-f19737e955e2/9068670d-f323-4180-92f9-f19737e955e2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.346502] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7749a139-12c0-4071-81c2-0033e319006b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.371919] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1395.371919] env[62820]: value = "task-1695297" [ 1395.371919] env[62820]: _type = "Task" [ 1395.371919] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.377845] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093436} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.378432] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1395.379374] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e767ac9-4e1b-4e79-be5d-7c2d1068071c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.384699] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695297, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.406079] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] fdc57b8b-a6ab-4e6d-9db0-4054b022aeec/fdc57b8b-a6ab-4e6d-9db0-4054b022aeec.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.408925] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22cb8d00-3327-41b1-951d-3b98a107a379 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.432063] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1395.432063] env[62820]: value = "task-1695298" [ 1395.432063] env[62820]: _type = "Task" [ 1395.432063] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.440013] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695298, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.451268] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a4ef63-3bd3-7eb9-58d9-1a674e59965b/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1395.452166] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd3535c-baf4-4f2a-9408-fa0ee030a7c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.458260] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a4ef63-3bd3-7eb9-58d9-1a674e59965b/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1395.458422] env[62820]: ERROR oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a4ef63-3bd3-7eb9-58d9-1a674e59965b/disk-0.vmdk due to incomplete transfer. [ 1395.458651] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5606043e-b0c0-420e-ad20-a8a1cdd75c8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.465093] env[62820]: DEBUG oslo_vmware.rw_handles [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a4ef63-3bd3-7eb9-58d9-1a674e59965b/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1395.465294] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Uploaded image a6035f84-247a-4ce7-b343-7924ac889d33 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1395.467022] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1395.467259] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-aa8f3b81-e6aa-4ad4-90c2-48f3de405c61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.473886] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1395.473886] env[62820]: value = "task-1695299" [ 1395.473886] env[62820]: _type = "Task" [ 1395.473886] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.483038] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695299, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.571262] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Updated VIF entry in instance network info cache for port e8ee995a-d8ee-4b9b-bb95-10e37f0a6313. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1395.571707] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Updating instance_info_cache with network_info: [{"id": "e8ee995a-d8ee-4b9b-bb95-10e37f0a6313", "address": "fa:16:3e:87:9d:0a", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8ee995a-d8", "ovs_interfaceid": "e8ee995a-d8ee-4b9b-bb95-10e37f0a6313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.633492] env[62820]: DEBUG oslo_concurrency.lockutils [req-075640e3-0ead-4eed-84b6-b6ad458f6f9d req-6617ce0e-1acb-44ca-8ee8-4f71dff46b3d service nova] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.744608] env[62820]: INFO nova.compute.manager [-] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Took 1.36 seconds to deallocate network for instance. 
[ 1395.767494] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1395.883951] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695297, 'name': ReconfigVM_Task, 'duration_secs': 0.289578} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.884371] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 9068670d-f323-4180-92f9-f19737e955e2/9068670d-f323-4180-92f9-f19737e955e2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.885068] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bb7541a-9a6a-4c23-8356-cc908b12f42d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.892084] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1395.892084] env[62820]: value = "task-1695300" [ 1395.892084] env[62820]: _type = "Task" [ 1395.892084] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.900759] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695300, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.942659] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695298, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.986266] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695299, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.076517] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Releasing lock "refresh_cache-fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.076517] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Received event network-vif-plugged-95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1396.076517] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquiring lock "9068670d-f323-4180-92f9-f19737e955e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.076835] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Lock "9068670d-f323-4180-92f9-f19737e955e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.079476] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Lock "9068670d-f323-4180-92f9-f19737e955e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.080488] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] No waiting events found dispatching network-vif-plugged-95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1396.081773] env[62820]: WARNING nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Received unexpected event network-vif-plugged-95cd753f-d804-4914-8266-24e2348bfd8f for instance with vm_state building and task_state spawning. [ 1396.081773] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Received event network-changed-95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1396.081773] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Refreshing instance network info cache due to event network-changed-95cd753f-d804-4914-8266-24e2348bfd8f. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1396.081773] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquiring lock "refresh_cache-9068670d-f323-4180-92f9-f19737e955e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.084064] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquired lock "refresh_cache-9068670d-f323-4180-92f9-f19737e955e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.084064] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Refreshing network info cache for port 95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.118324] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1396.119329] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1396.119648] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.120881] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1396.121210] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.121495] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 
tempest-ServersAdmin275Test-1421829862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1396.121844] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1396.122645] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1396.122949] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1396.123533] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1396.123859] env[62820]: DEBUG nova.virt.hardware [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1396.125680] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f17592-8854-4e2b-944f-67bdff1d9138 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.134818] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12193d39-c3cd-4457-aa88-a2f5c64ba7c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.155670] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.161581] env[62820]: DEBUG oslo.service.loopingcall [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.162030] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.162365] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-814408ed-1966-45fb-8a2a-f1f6e97599a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.181873] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.181873] env[62820]: value = "task-1695301" [ 1396.181873] env[62820]: _type = "Task" [ 1396.181873] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.193184] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695301, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.251446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.278023] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1396.278023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.206s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.278023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.696s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.278023] env[62820]: INFO nova.compute.claims [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.382463] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.382692] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.382842] 
env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.404921] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695300, 'name': Rename_Task, 'duration_secs': 0.137764} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.406078] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1396.406078] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0af63960-82e0-4836-aee7-9ec9f243e1ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.418036] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1396.418036] env[62820]: value = "task-1695302" [ 1396.418036] env[62820]: _type = "Task" [ 1396.418036] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.429883] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695302, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.443923] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695298, 'name': ReconfigVM_Task, 'duration_secs': 0.721238} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.444305] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Reconfigured VM instance instance-0000001c to attach disk [datastore1] fdc57b8b-a6ab-4e6d-9db0-4054b022aeec/fdc57b8b-a6ab-4e6d-9db0-4054b022aeec.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1396.444988] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17950037-729e-4311-b1f4-c7ec66aacb47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.453035] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1396.453035] env[62820]: value = "task-1695303" [ 1396.453035] env[62820]: _type = "Task" [ 1396.453035] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.461180] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695303, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.484991] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695299, 'name': Destroy_Task, 'duration_secs': 0.942358} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.484991] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Destroyed the VM [ 1396.485152] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1396.485297] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dd110548-5444-4ec5-8633-a02130c384f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.491726] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1396.491726] env[62820]: value = "task-1695304" [ 1396.491726] env[62820]: _type = "Task" [ 1396.491726] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.500248] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695304, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.629545] env[62820]: DEBUG nova.compute.manager [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1396.629683] env[62820]: DEBUG nova.compute.manager [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing instance network info cache due to event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1396.629897] env[62820]: DEBUG oslo_concurrency.lockutils [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.630147] env[62820]: DEBUG oslo_concurrency.lockutils [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.630243] env[62820]: DEBUG nova.network.neutron [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.691281] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695301, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.931401] env[62820]: DEBUG oslo_vmware.api [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695302, 'name': PowerOnVM_Task, 'duration_secs': 0.490974} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.931883] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1396.932258] env[62820]: INFO nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Took 7.85 seconds to spawn the instance on the hypervisor. 
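The poll records above (task-1695301 CreateVM_Task, task-1695302 PowerOnVM_Task, "progress is 0%/99%", then "completed successfully") are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task in the paths shown). A minimal sketch of that pattern follows; it is not taken from this log or from the Nova source, and the vCenter host, credentials and managed-object reference are placeholders.

    # Sketch only: drive a vSphere task the way the records above show.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test',               # placeholder vCenter host
        'administrator@vsphere.local',   # placeholder user
        'secret',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5,          # roughly matches the sub-second polls in the log
    )

    # PowerOnVM_Task (like CreateVM_Task above) returns a Task managed object.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')   # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task until it succeeds or raises, emitting the
    # "Task: {...} progress is N%" DEBUG lines seen above.
    task_info = session.wait_for_task(task)
    print(task_info.state)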
[ 1396.932610] env[62820]: DEBUG nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1396.933630] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0db7d39-7ac4-47fd-817c-00cfec299254 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.962742] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695303, 'name': Rename_Task, 'duration_secs': 0.139788} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.963214] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1396.963594] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-206ab78b-3496-4e8a-9a0a-e388e4041bf2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.972014] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1396.972014] env[62820]: value = "task-1695305" [ 1396.972014] env[62820]: _type = "Task" [ 1396.972014] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.979862] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.009142] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695304, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.090755] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Updated VIF entry in instance network info cache for port 95cd753f-d804-4914-8266-24e2348bfd8f. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.090989] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Updating instance_info_cache with network_info: [{"id": "95cd753f-d804-4914-8266-24e2348bfd8f", "address": "fa:16:3e:82:40:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95cd753f-d8", "ovs_interfaceid": "95cd753f-d804-4914-8266-24e2348bfd8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.196024] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695301, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.327934] env[62820]: DEBUG nova.compute.manager [req-dc17edbb-8d74-4db6-a953-87a0f85c0eea req-2babb8ce-995e-4190-9211-f7279d7dd1e4 service nova] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Received event network-vif-deleted-a033acfd-d3ec-4c0f-a248-fb38c4e3533b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1397.426804] env[62820]: DEBUG nova.network.neutron [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updated VIF entry in instance network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.426804] env[62820]: DEBUG nova.network.neutron [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.451365] env[62820]: INFO nova.compute.manager [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Took 38.81 seconds to build instance. [ 1397.483743] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695305, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.504919] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695304, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.594173] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Releasing lock "refresh_cache-9068670d-f323-4180-92f9-f19737e955e2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.594440] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1397.594611] env[62820]: DEBUG nova.compute.manager [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing instance network info cache due to event network-changed-507956c3-f482-428d-b807-71f6d0ca9cb4. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1397.594799] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquiring lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.680743] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.680919] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1397.699184] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695301, 'name': CreateVM_Task, 'duration_secs': 1.362687} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.702635] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.703323] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.703561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.703859] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1397.704642] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eedc6c07-a719-4798-ba9c-4ba3bc0bd206 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.709604] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1397.709604] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52035820-2345-5594-0463-c9d9c22a4489" [ 1397.709604] env[62820]: _type = "Task" [ 1397.709604] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.722365] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52035820-2345-5594-0463-c9d9c22a4489, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.810020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace577ee-57d7-4835-ae18-b133e357d68d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.815652] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf57de1-a81a-4ba9-9be6-3bf2daec08f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.847291] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfdb725-82e1-4999-bc76-08df419fc4aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.854728] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20199c7-2d81-4eb8-8cd2-ee14c07d400b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.868785] env[62820]: DEBUG nova.compute.provider_tree [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.928279] env[62820]: DEBUG oslo_concurrency.lockutils [req-769f823c-705f-4a5b-8f0e-677ed981a79d req-176cffb3-78a8-4e34-aa65-abd9412497f7 service nova] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.928710] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Acquired lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.928904] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Refreshing network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.953673] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84ee1973-89ea-4f81-acd7-aec7aed45f95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "9068670d-f323-4180-92f9-f19737e955e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.282s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.981226] env[62820]: DEBUG oslo_vmware.api [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695305, 'name': PowerOnVM_Task, 'duration_secs': 0.565294} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.981443] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1397.981629] env[62820]: INFO nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Took 11.57 seconds to spawn the instance on the hypervisor. [ 1397.981803] env[62820]: DEBUG nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1397.982579] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cf34b1-29bc-492b-8e41-9e25e6056cc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.003191] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695304, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.176916] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.177264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.177486] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.177672] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.177864] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.183379] env[62820]: INFO nova.compute.manager [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Terminating instance [ 1398.219842] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52035820-2345-5594-0463-c9d9c22a4489, 'name': SearchDatastore_Task, 'duration_secs': 0.012501} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.220380] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.220621] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.220894] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.221101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.221353] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.221640] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f988f7af-2ee2-42a2-b102-4e3426fa8bd7 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.230401] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.230680] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1398.231618] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f1f78a6-09f3-4d41-b1cc-bf891315bf48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.238053] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1398.238053] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527b5754-14b3-e8bf-c57d-ffa1fbfbb1ec" [ 1398.238053] env[62820]: _type = "Task" [ 1398.238053] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.247618] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527b5754-14b3-e8bf-c57d-ffa1fbfbb1ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.372470] env[62820]: DEBUG nova.scheduler.client.report [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1398.456447] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1398.501758] env[62820]: INFO nova.compute.manager [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Took 43.21 seconds to build instance. 
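The 'Acquiring lock ... / Lock ... acquired ... waited N.NNNs / "released" ... held N.NNNs' records above come from oslo.concurrency's lockutils wrapper (lockutils.py:402-421 in the paths shown). A minimal sketch of that locking pattern, with a placeholder lock name and body, not taken from the Nova source:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Placeholder for ResourceTracker work; while one caller runs this,
        # other callers block and later log how long they waited and how long
        # the lock was held, as in the records above.
        pass

    # The same primitive is also available as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # placeholder critical section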
[ 1398.511713] env[62820]: DEBUG oslo_vmware.api [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695304, 'name': RemoveSnapshot_Task, 'duration_secs': 1.800767} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.512594] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1398.512841] env[62820]: INFO nova.compute.manager [None req-49051540-9d6f-43bf-b69f-1c45e1eb0455 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Took 18.61 seconds to snapshot the instance on the hypervisor. [ 1398.683827] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.684119] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.687969] env[62820]: DEBUG nova.compute.manager [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1398.688193] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1398.689366] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce4d152-7464-4db8-b5c6-53a9d5300c3b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.697551] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1398.699804] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5223ad5c-c191-45bb-aa68-f1807768e285 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.706074] env[62820]: DEBUG oslo_vmware.api [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1398.706074] env[62820]: value = "task-1695306" [ 1398.706074] env[62820]: _type = "Task" [ 1398.706074] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.714237] env[62820]: DEBUG oslo_vmware.api [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.750207] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527b5754-14b3-e8bf-c57d-ffa1fbfbb1ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010705} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.750815] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1da8ebb5-6a71-40f5-bda0-3e0342ea598c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.754545] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updated VIF entry in instance network info cache for port 507956c3-f482-428d-b807-71f6d0ca9cb4. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.754896] env[62820]: DEBUG nova.network.neutron [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [{"id": "507956c3-f482-428d-b807-71f6d0ca9cb4", "address": "fa:16:3e:ca:1a:49", "network": {"id": "22889f2c-0fd9-4556-a2e7-8647c8c1faa6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1091669222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74f3dd3dcc10421f803a0039e3add051", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507956c3-f4", "ovs_interfaceid": "507956c3-f482-428d-b807-71f6d0ca9cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.760243] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1398.760243] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52005ff6-be6d-704f-f7a4-a89bd6ba1ee0" [ 1398.760243] env[62820]: _type = "Task" [ 1398.760243] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.768003] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52005ff6-be6d-704f-f7a4-a89bd6ba1ee0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.881484] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.882097] env[62820]: DEBUG nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1398.884682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.962s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.886142] env[62820]: INFO nova.compute.claims [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1398.982257] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.992959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "9068670d-f323-4180-92f9-f19737e955e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.993293] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "9068670d-f323-4180-92f9-f19737e955e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.993595] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "9068670d-f323-4180-92f9-f19737e955e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.993822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "9068670d-f323-4180-92f9-f19737e955e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.994062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "9068670d-f323-4180-92f9-f19737e955e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.996487] env[62820]: INFO nova.compute.manager [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Terminating instance [ 1399.004593] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05a770b7-b2f6-4916-828a-8c21241d455b tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.777s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.215933] env[62820]: DEBUG oslo_vmware.api [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695306, 'name': PowerOffVM_Task, 'duration_secs': 0.359132} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.216375] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1399.216553] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1399.216810] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d58cb699-26cc-488c-a098-29140833d762 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.258626] env[62820]: DEBUG oslo_concurrency.lockutils [req-7bf47da5-69db-4363-9567-7983692ae744 req-57447f03-0e86-4ce5-a9cb-b5fe72e4cf89 service nova] Releasing lock "refresh_cache-cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.270791] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52005ff6-be6d-704f-f7a4-a89bd6ba1ee0, 'name': SearchDatastore_Task, 'duration_secs': 0.011193} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.271027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.271252] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1399.271520] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a51b5455-fdbc-4d50-94f4-609782d9464b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.280462] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1399.280462] env[62820]: value = "task-1695308" [ 1399.280462] env[62820]: _type = "Task" [ 1399.280462] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.292889] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695308, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.394088] env[62820]: DEBUG nova.compute.utils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1399.397883] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1399.398555] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1399.398555] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Deleting the datastore file [datastore1] cc2b0ed5-b711-487d-8bfc-ee2745c9ef89 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1399.398804] env[62820]: DEBUG nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Not allocating networking since 'none' was specified. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1399.399241] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f3c0737-ffdb-461d-a094-4cb621eadbca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.406261] env[62820]: DEBUG oslo_vmware.api [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for the task: (returnval){ [ 1399.406261] env[62820]: value = "task-1695309" [ 1399.406261] env[62820]: _type = "Task" [ 1399.406261] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.419640] env[62820]: DEBUG oslo_vmware.api [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695309, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.501842] env[62820]: DEBUG nova.compute.manager [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1399.502720] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1399.503087] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fa082f-4329-429d-bfca-8d52bf260b8a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.508412] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1399.516672] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1399.516975] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b42b00a-ab87-4afa-a69f-bd01de4e3f8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.528028] env[62820]: DEBUG oslo_vmware.api [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1399.528028] env[62820]: value = "task-1695310" [ 1399.528028] env[62820]: _type = "Task" [ 1399.528028] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.536568] env[62820]: DEBUG oslo_vmware.api [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695310, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.680652] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.680864] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1399.796217] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695308, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.892333] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.892711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.892966] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.893786] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.893786] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.895742] env[62820]: INFO nova.compute.manager [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Terminating instance [ 1399.897850] env[62820]: DEBUG nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1399.917183] env[62820]: DEBUG oslo_vmware.api [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Task: {'id': task-1695309, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.48091} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.917465] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1399.917653] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1399.917843] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1399.918082] env[62820]: INFO nova.compute.manager [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1399.918343] env[62820]: DEBUG oslo.service.loopingcall [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.918541] env[62820]: DEBUG nova.compute.manager [-] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1399.918639] env[62820]: DEBUG nova.network.neutron [-] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1400.032434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.038493] env[62820]: DEBUG oslo_vmware.api [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695310, 'name': PowerOffVM_Task, 'duration_secs': 0.391154} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.039014] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1400.039264] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1400.039532] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f083380-b128-4dc2-9c20-fa3d27a8287f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.130668] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1400.130935] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1400.132237] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Deleting the datastore file [datastore1] 9068670d-f323-4180-92f9-f19737e955e2 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1400.132237] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ebdb3de-5771-4c05-8881-fa1c7850fda4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.137686] env[62820]: DEBUG oslo_vmware.api [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for the task: (returnval){ [ 1400.137686] env[62820]: value = "task-1695312" [ 1400.137686] env[62820]: _type = "Task" [ 1400.137686] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.147389] env[62820]: DEBUG oslo_vmware.api [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.214627] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.214878] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.215035] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1400.297172] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552345} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.297483] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1400.297713] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1400.298031] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a222939-1c5e-4c75-ac6b-f73e3cedeb58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.308637] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1400.308637] env[62820]: value = "task-1695313" [ 1400.308637] env[62820]: _type = "Task" [ 1400.308637] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.316144] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695313, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.386445] env[62820]: DEBUG nova.compute.manager [req-72294ef2-03ff-4105-9ba6-809587bab170 req-9be232f2-870b-49d8-b615-d3060d14dbea service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Received event network-vif-deleted-507956c3-f482-428d-b807-71f6d0ca9cb4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1400.387108] env[62820]: INFO nova.compute.manager [req-72294ef2-03ff-4105-9ba6-809587bab170 req-9be232f2-870b-49d8-b615-d3060d14dbea service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Neutron deleted interface 507956c3-f482-428d-b807-71f6d0ca9cb4; detaching it from the instance and deleting it from the info cache [ 1400.387108] env[62820]: DEBUG nova.network.neutron [req-72294ef2-03ff-4105-9ba6-809587bab170 req-9be232f2-870b-49d8-b615-d3060d14dbea service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.406563] env[62820]: DEBUG nova.compute.manager [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1400.406828] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1400.413024] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4255112-2f19-44e4-8fe4-c58f86741065 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.423598] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1400.424263] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4124fa84-6156-4682-a585-b161a1c7910e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.431135] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01089f1f-c55c-4c77-8b48-3a6b3ea05bc0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.432019] env[62820]: DEBUG oslo_vmware.api [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1400.432019] env[62820]: value = "task-1695314" [ 1400.432019] env[62820]: _type = "Task" [ 1400.432019] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.441967] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4b451e-3b74-4965-a892-57630c1269be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.448701] env[62820]: DEBUG oslo_vmware.api [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695314, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.478231] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711c3772-1891-4718-8d98-c435a2f1f83d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.486288] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05489482-697a-4a04-aa1c-2f0fca4ec809 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.500697] env[62820]: DEBUG nova.compute.provider_tree [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.648823] env[62820]: DEBUG oslo_vmware.api [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Task: {'id': task-1695312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232284} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.649181] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1400.649250] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1400.649461] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1400.650500] env[62820]: INFO nova.compute.manager [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Took 1.15 seconds to destroy the instance on the hypervisor. 
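The PowerOffVM_Task / DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern visible in the paths logged here (oslo_vmware/service.py and oslo_vmware/api.py): a vSphere *_Task method is invoked through the API session, and the returned task object is then polled until it finishes, which is what produces the "Waiting for the task ... progress is N% ... completed successfully" lines. A minimal sketch of that invoke-then-poll pattern follows; the vCenter endpoint, credentials and MoRef id are placeholders, not values from this log, and the exact constructor arguments are an assumption about oslo.vmware's VMwareAPISession.

    # Sketch only: placeholder endpoint/credentials/moref; illustrates the
    # invoke-then-poll pattern behind the task DEBUG lines above.
    from oslo_vmware import api, vim_util

    # (host, username, password, api_retry_count, task_poll_interval) -- assumed ordering
    session = api.VMwareAPISession('vcenter.example.org', 'administrator',
                                   'secret', 10, 0.5)

    # Build a managed-object reference for the VM from a known MoRef id (placeholder).
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Invoke the vSphere task method, then poll it to completion; the polling
    # is what emits the "Task: {...} progress is N%" entries seen in this log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The same shape applies to the datastore file deletions above: the only difference is which *_Task method is invoked before wait_for_task() is called.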
[ 1400.650500] env[62820]: DEBUG oslo.service.loopingcall [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.650500] env[62820]: DEBUG nova.compute.manager [-] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1400.650500] env[62820]: DEBUG nova.network.neutron [-] [instance: 9068670d-f323-4180-92f9-f19737e955e2] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1400.819353] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073834} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.820237] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1400.821830] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496d8ff4-c2e9-4e7a-9951-0d23925e7c75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.848283] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1400.848792] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bae72a9-0f5d-4f1c-85f7-800caedfb352 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.863878] env[62820]: DEBUG nova.network.neutron [-] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.873300] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1400.873300] env[62820]: value = "task-1695315" [ 1400.873300] env[62820]: _type = "Task" [ 1400.873300] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.882049] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695315, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.890890] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00fc7d65-50a2-4e69-9859-8d43e2f1b1a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.901109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff36374e-25d8-4d55-80b6-db90a4494a16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.918286] env[62820]: DEBUG nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1400.937838] env[62820]: DEBUG nova.compute.manager [req-72294ef2-03ff-4105-9ba6-809587bab170 req-9be232f2-870b-49d8-b615-d3060d14dbea service nova] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Detach interface failed, port_id=507956c3-f482-428d-b807-71f6d0ca9cb4, reason: Instance cc2b0ed5-b711-487d-8bfc-ee2745c9ef89 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1400.950099] env[62820]: DEBUG oslo_vmware.api [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695314, 'name': PowerOffVM_Task, 'duration_secs': 0.348501} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.950465] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1400.950829] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1400.951142] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8745293c-c791-4df0-ac41-ceee1b57c1ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.955891] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1400.956210] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1400.956210] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.956952] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1400.956952] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.956952] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 
tempest-ServerDiagnosticsV248Test-159901226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1400.956952] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1400.956952] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1400.957168] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1400.957197] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1400.959517] env[62820]: DEBUG nova.virt.hardware [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1400.959517] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df6ab72-15fd-410c-ad8f-e10574ccfb89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.968149] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bf2dcc-a69d-4b58-995d-b83e3fcecdaa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.983544] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1400.990440] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Creating folder: Project (091aa5695e574c8f832005afa1f9ca36). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1400.991428] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-020e4602-2e5b-4578-b39a-48ffd4a7a7c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.001925] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Created folder: Project (091aa5695e574c8f832005afa1f9ca36) in parent group-v353379. [ 1401.002178] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Creating folder: Instances. Parent ref: group-v353478. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1401.002441] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36c129be-a97a-4239-8992-71b22ff50a88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.004936] env[62820]: DEBUG nova.scheduler.client.report [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1401.016880] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Created folder: Instances in parent group-v353478. [ 1401.017104] env[62820]: DEBUG oslo.service.loopingcall [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.017284] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1401.017560] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-107ec78f-64d0-4ea3-9099-e10539bef41f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.034628] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1401.034871] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1401.035100] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleting the datastore file [datastore1] fdc57b8b-a6ab-4e6d-9db0-4054b022aeec {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1401.035773] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-807c7f45-7891-43e2-a6a8-a2ece1fa9067 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.039201] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.039201] env[62820]: value = "task-1695319" [ 1401.039201] env[62820]: _type = "Task" [ 1401.039201] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.043271] env[62820]: DEBUG oslo_vmware.api [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1401.043271] env[62820]: value = "task-1695320" [ 1401.043271] env[62820]: _type = "Task" [ 1401.043271] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.048916] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695319, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.053772] env[62820]: DEBUG oslo_vmware.api [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695320, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.372931] env[62820]: INFO nova.compute.manager [-] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Took 1.45 seconds to deallocate network for instance. 
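The "Waiting for function ... to return" entries above (for _deallocate_network_with_retries and vm_util.create_vm) are logged from oslo.service's looping-call helper (loopingcall.py per the recorded path). As a rough illustration only, and not the exact Nova helper, the general fixed-interval polling idiom in oslo.service looks like the sketch below; poll_once is a hypothetical stand-in for the function being waited on.

    # Minimal sketch of the oslo.service looping-call idiom; poll_once is a
    # made-up example function, not Nova code.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def poll_once():
        attempts['n'] += 1
        if attempts['n'] >= 3:                     # pretend the work finished
            raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(poll_once)
    result = timer.start(interval=0.1).wait()      # blocks until LoopingCallDone
    print(result)                                  # -> 'done'

The looping call re-invokes the function on a fixed interval and returns once the function signals completion by raising LoopingCallDone, which is the behaviour the "Waiting for function ... to return" lines correspond to.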
[ 1401.383831] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695315, 'name': ReconfigVM_Task, 'duration_secs': 0.274505} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.386214] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1401.387057] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f284ad96-06b9-4d2b-8693-97c549cb429d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.395693] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1401.395693] env[62820]: value = "task-1695321" [ 1401.395693] env[62820]: _type = "Task" [ 1401.395693] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.411914] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695321, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.474147] env[62820]: DEBUG nova.network.neutron [-] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.511737] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.515778] env[62820]: DEBUG nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1401.516694] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.485s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.517170] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.519542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.754s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.521293] env[62820]: INFO nova.compute.claims [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.554291] env[62820]: DEBUG oslo_vmware.api [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695320, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168686} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.560334] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1401.560778] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1401.561088] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1401.561366] env[62820]: INFO nova.compute.manager [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Took 1.15 seconds to destroy the instance on the hypervisor. 
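The lock traffic above ("Acquiring lock ... by ...", "Lock ... acquired by ... :: waited", "... \"released\" by ... :: held") comes from oslo.concurrency's lockutils wrappers, whose file/line numbers are recorded in each entry. A minimal sketch of the two usual forms follows; "compute_resources" is used purely as an illustrative lock name, matching the name seen in the log.

    # Sketch of the oslo.concurrency locking seen in the entries above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the named lock held; the synchronized wrapper is what
        # logs the "acquired by ... :: waited" / "released ... :: held" lines.
        pass

    # Equivalent inline form; the lock() context manager logs the plain
    # "Acquiring lock" / "Acquired lock" / "Releasing lock" variants.
    with lockutils.lock('compute_resources'):
        pass

    claim_resources()

The "waited" and "held" durations in the log are measured around exactly these acquire/release points, which is why long waits on "compute_resources" (tens of seconds in some entries above) show up as contention between concurrent instance claims and terminations.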
[ 1401.562898] env[62820]: DEBUG oslo.service.loopingcall [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.562898] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695319, 'name': CreateVM_Task, 'duration_secs': 0.306898} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.564472] env[62820]: INFO nova.scheduler.client.report [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Deleted allocations for instance 15e95a20-2729-46c6-a613-32aa353ed329 [ 1401.565587] env[62820]: DEBUG nova.compute.manager [-] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1401.566182] env[62820]: DEBUG nova.network.neutron [-] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1401.569022] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1401.572454] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.572727] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.573339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1401.574574] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5710d796-da4e-47ce-93f9-871dd0595354 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.580339] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1401.580339] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cdad39-0baa-33eb-2a4a-c77411753507" [ 1401.580339] env[62820]: _type = "Task" [ 1401.580339] env[62820]: } 
to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.590992] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cdad39-0baa-33eb-2a4a-c77411753507, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.593024] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Updating instance_info_cache with network_info: [{"id": "7f1b810c-dc19-4971-a532-bdac241941cf", "address": "fa:16:3e:05:4b:bd", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f1b810c-dc", "ovs_interfaceid": "7f1b810c-dc19-4971-a532-bdac241941cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.615505] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.615881] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.616529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.616529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc 
tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.616783] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.622107] env[62820]: INFO nova.compute.manager [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Terminating instance [ 1401.836467] env[62820]: DEBUG nova.compute.manager [req-6ddde1db-c95b-478c-85a6-7bc3850660a6 req-293223d2-248c-49da-aded-e2f0cfe29304 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Received event network-vif-deleted-e8ee995a-d8ee-4b9b-bb95-10e37f0a6313 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1401.836738] env[62820]: INFO nova.compute.manager [req-6ddde1db-c95b-478c-85a6-7bc3850660a6 req-293223d2-248c-49da-aded-e2f0cfe29304 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Neutron deleted interface e8ee995a-d8ee-4b9b-bb95-10e37f0a6313; detaching it from the instance and deleting it from the info cache [ 1401.836890] env[62820]: DEBUG nova.network.neutron [req-6ddde1db-c95b-478c-85a6-7bc3850660a6 req-293223d2-248c-49da-aded-e2f0cfe29304 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.880095] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.906102] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695321, 'name': Rename_Task, 'duration_secs': 0.167841} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.906102] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.906954] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2d39b36-0967-419f-8366-8f7b60bc1387 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.912808] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1401.912808] env[62820]: value = "task-1695322" [ 1401.912808] env[62820]: _type = "Task" [ 1401.912808] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.921251] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695322, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.975797] env[62820]: INFO nova.compute.manager [-] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Took 1.33 seconds to deallocate network for instance. [ 1402.029020] env[62820]: DEBUG nova.compute.utils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1402.033338] env[62820]: DEBUG nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Not allocating networking since 'none' was specified. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1402.081145] env[62820]: DEBUG oslo_concurrency.lockutils [None req-82b0be07-43d3-4d51-ace7-e2a200a92b0d tempest-ServerTagsTestJSON-1243776301 tempest-ServerTagsTestJSON-1243776301-project-member] Lock "15e95a20-2729-46c6-a613-32aa353ed329" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.759s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.095414] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cdad39-0baa-33eb-2a4a-c77411753507, 'name': SearchDatastore_Task, 'duration_secs': 0.011547} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.095767] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.096024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1402.096234] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.096371] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.097452] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1402.097452] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-069f58d6-f6bc-4ded-8274-6fed7c2f45b3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.097452] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1402.097452] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12ff1314-ab18-40fe-8a2c-87023b90434f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.099384] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.099495] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.108407] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1402.108590] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1402.109569] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a205d7d-fbed-4acc-8724-99bdad80dc33 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.115898] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1402.115898] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529ec826-89a0-793e-1196-05e9b2df9a5f" [ 1402.115898] env[62820]: _type = "Task" [ 1402.115898] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.124162] env[62820]: DEBUG nova.compute.manager [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1402.124372] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.124883] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529ec826-89a0-793e-1196-05e9b2df9a5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.125624] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6494f15f-0889-407f-a75f-e40d3fe30dca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.132091] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.132331] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7e7f5b8-1af0-405f-a61e-ebed58837e60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.138351] env[62820]: DEBUG oslo_vmware.api [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1402.138351] env[62820]: value = "task-1695323" [ 1402.138351] env[62820]: _type = "Task" [ 1402.138351] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.145942] env[62820]: DEBUG oslo_vmware.api [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695323, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.309984] env[62820]: DEBUG nova.network.neutron [-] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.340036] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76cd8e23-3fd0-40b4-80cd-3705fd0b076f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.350248] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b85528f-e42e-4e43-9821-e25b7f273f97 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.391395] env[62820]: DEBUG nova.compute.manager [req-6ddde1db-c95b-478c-85a6-7bc3850660a6 req-293223d2-248c-49da-aded-e2f0cfe29304 service nova] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Detach interface failed, port_id=e8ee995a-d8ee-4b9b-bb95-10e37f0a6313, reason: Instance fdc57b8b-a6ab-4e6d-9db0-4054b022aeec could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1402.430023] env[62820]: DEBUG oslo_vmware.api [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695322, 'name': PowerOnVM_Task, 'duration_secs': 0.468981} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.430023] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.430023] env[62820]: DEBUG nova.compute.manager [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1402.430023] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a8833c-d255-4764-81d9-0f5333a9e2f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.441425] env[62820]: DEBUG nova.compute.manager [req-06b447ea-d30e-4c34-aed1-6b888e4dcce3 req-58eb52b6-c2ad-464b-8376-259e2c391867 service nova] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Received event network-vif-deleted-95cd753f-d804-4914-8266-24e2348bfd8f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1402.485868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.534974] env[62820]: DEBUG nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1402.603692] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.634235] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529ec826-89a0-793e-1196-05e9b2df9a5f, 'name': SearchDatastore_Task, 'duration_secs': 0.010723} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.635087] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74017532-5d62-4c7a-891a-dfdcf9f276ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.647128] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1402.647128] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52537305-3d24-7189-5eca-850b69550026" [ 1402.647128] env[62820]: _type = "Task" [ 1402.647128] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.658650] env[62820]: DEBUG oslo_vmware.api [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695323, 'name': PowerOffVM_Task, 'duration_secs': 0.160973} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.659734] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.660091] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.660505] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dccd3e1-fbc2-44ae-9cd2-b1426464215d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.666791] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52537305-3d24-7189-5eca-850b69550026, 'name': SearchDatastore_Task, 'duration_secs': 0.009771} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.667525] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.667846] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] d040f935-566b-4bbe-b9f6-379fd1dc1a91/d040f935-566b-4bbe-b9f6-379fd1dc1a91.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1402.668472] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a8fb6c9-78e3-4c83-a6ad-15b33a66beb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.676121] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1402.676121] env[62820]: value = "task-1695325" [ 1402.676121] env[62820]: _type = "Task" [ 1402.676121] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.688755] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695325, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.736030] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.736030] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.736158] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Deleting the datastore file [datastore1] 9287b8eb-487d-4f51-9e7c-90c016a1c8e2 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.736536] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-930e1c64-cdc7-4030-85f2-c0c5ca77eeb3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.743423] env[62820]: DEBUG oslo_vmware.api [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1402.743423] env[62820]: value = "task-1695326" [ 1402.743423] env[62820]: _type = "Task" [ 1402.743423] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.754881] env[62820]: DEBUG oslo_vmware.api [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.813592] env[62820]: INFO nova.compute.manager [-] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Took 1.25 seconds to deallocate network for instance. 
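The entries above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CopyVirtualDisk_Task) all follow the same wait-for-task pattern: a vCenter task is created, its progress is polled ("progress is 0%"), and on success the duration is reported ("duration_secs"). As a purely illustrative aid for reading these entries, here is a minimal Python sketch of that polling loop; the names and the `poll` callable are hypothetical and this is not the oslo.vmware implementation.

    import time

    def wait_for_task(poll, interval=0.5, log=print):
        """Poll a VMware-style task until it completes.

        `poll` is a hypothetical callable returning a dict such as
        {'state': 'running' | 'success' | 'error', 'progress': int},
        standing in for a PropertyCollector read of the task object.
        """
        start = time.monotonic()
        while True:
            info = poll()
            if info['state'] == 'success':
                # Mirrors the "completed successfully ... duration_secs" entries.
                return {'duration_secs': round(time.monotonic() - start, 6)}
            if info['state'] == 'error':
                raise RuntimeError('task failed')
            # Mirrors the intermediate "progress is N%" DEBUG entries.
            log(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)

A call such as wait_for_task(lambda: {'state': 'success', 'progress': 100}) returns immediately with a small duration, which is the shape of the Rename_Task and PowerOnVM_Task completions logged above.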
[ 1402.953188] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.997552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f115d09-99c1-4ea4-b9a6-9be5994539f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.011585] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43713c9-efb9-450a-bb5e-55713cbfc213 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.057753] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c85786f-ce35-4953-8654-50e68270bee9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.068962] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f364baad-0328-488f-bfcc-cbd17803bae4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.085158] env[62820]: DEBUG nova.compute.provider_tree [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1403.186248] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47633} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.186600] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] d040f935-566b-4bbe-b9f6-379fd1dc1a91/d040f935-566b-4bbe-b9f6-379fd1dc1a91.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1403.186745] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1403.187013] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65e665dd-6047-4deb-b2d6-163a1c2287d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.193749] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1403.193749] env[62820]: value = "task-1695327" [ 1403.193749] env[62820]: _type = "Task" [ 1403.193749] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.204358] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695327, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.253688] env[62820]: DEBUG oslo_vmware.api [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437094} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.253972] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.254406] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1403.254607] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1403.254786] env[62820]: INFO nova.compute.manager [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1403.255039] env[62820]: DEBUG oslo.service.loopingcall [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1403.255239] env[62820]: DEBUG nova.compute.manager [-] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1403.255598] env[62820]: DEBUG nova.network.neutron [-] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1403.327599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.558711] env[62820]: DEBUG nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1403.604582] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1403.604946] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1403.605231] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1403.605632] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1403.605824] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1403.606382] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1403.606617] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1403.606776] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1403.606942] env[62820]: DEBUG nova.virt.hardware [None 
req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1403.607116] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1403.607290] env[62820]: DEBUG nova.virt.hardware [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1403.608225] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6098c7da-0bc8-4d19-a6d8-0c943ab21cf2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.611939] env[62820]: ERROR nova.scheduler.client.report [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [req-80f4bf44-8bd3-446b-b3b9-c4a1523c3a13] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-80f4bf44-8bd3-446b-b3b9-c4a1523c3a13"}]} [ 1403.620835] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab2edd8-dcff-4b0d-b051-907745ed9d93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.637543] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1403.644247] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Creating folder: Project (60bc6aa351d24aca8058afeccf5fa71f). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1403.645891] env[62820]: DEBUG nova.scheduler.client.report [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1403.648038] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-992de198-4bcc-45bb-983f-7f5fb114d39c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.663182] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Created folder: Project (60bc6aa351d24aca8058afeccf5fa71f) in parent group-v353379. [ 1403.663401] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Creating folder: Instances. Parent ref: group-v353481. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1403.663681] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df883e05-2ff5-4c70-a6a5-aba0d8f76b40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.676128] env[62820]: DEBUG nova.scheduler.client.report [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1403.676128] env[62820]: DEBUG nova.compute.provider_tree [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1403.681673] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Created folder: Instances in parent group-v353481. 
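The scheduler report client entries above show an inventory PUT to Placement being rejected with 409 "placement.concurrent_update" (a resource provider generation conflict), followed by a refresh of the provider's inventories and a retry; later entries show the provider generation moving from 52 to 53 once the write lands. For illustration only, the sketch below shows the generation-based optimistic-concurrency retry in plain Python with requests; the endpoint URL is a placeholder, and auth/microversion headers that a real Placement client would send are omitted.

    import requests

    PLACEMENT = "http://placement.example/placement"  # illustrative base URL

    def put_inventory(session: requests.Session, rp_uuid: str, inventories: dict) -> None:
        """Write a provider's inventory, retrying once on a generation conflict."""
        for _ in range(2):
            # Read the current provider generation before writing.
            rp = session.get(f"{PLACEMENT}/resource_providers/{rp_uuid}").json()
            body = {
                "resource_provider_generation": rp["generation"],
                "inventories": inventories,  # e.g. {'VCPU': {...}, 'MEMORY_MB': {...}}
            }
            resp = session.put(
                f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories", json=body
            )
            if resp.status_code != 409:
                resp.raise_for_status()
                return
            # 409: another writer bumped the generation; refresh and retry.
        raise RuntimeError("placement.concurrent_update: gave up after one retry")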
[ 1403.681673] env[62820]: DEBUG oslo.service.loopingcall [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1403.681673] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1403.681673] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90eddcc9-e928-4015-b7d8-9735820aafcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.697601] env[62820]: DEBUG nova.scheduler.client.report [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1403.709218] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217633} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.710768] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1403.711084] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1403.711084] env[62820]: value = "task-1695330" [ 1403.711084] env[62820]: _type = "Task" [ 1403.711084] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.712040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747ceaad-8d54-4a52-aecc-8a04255236a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.718946] env[62820]: DEBUG nova.scheduler.client.report [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1403.725928] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695330, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.742249] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] d040f935-566b-4bbe-b9f6-379fd1dc1a91/d040f935-566b-4bbe-b9f6-379fd1dc1a91.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1403.742559] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb0ae83b-6875-419d-9596-653b19d061bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.765151] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1403.765151] env[62820]: value = "task-1695331" [ 1403.765151] env[62820]: _type = "Task" [ 1403.765151] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.772975] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695331, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.998913] env[62820]: DEBUG nova.compute.manager [req-e2813b25-19e5-4ab3-9415-14fc3d800aad req-e288aea5-1c65-4e02-bcb8-f7ed416a96d2 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Received event network-vif-deleted-71bc98ce-e716-4517-ade6-5d17b8a032e9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1403.998913] env[62820]: INFO nova.compute.manager [req-e2813b25-19e5-4ab3-9415-14fc3d800aad req-e288aea5-1c65-4e02-bcb8-f7ed416a96d2 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Neutron deleted interface 71bc98ce-e716-4517-ade6-5d17b8a032e9; detaching it from the instance and deleting it from the info cache [ 1403.998913] env[62820]: DEBUG nova.network.neutron [req-e2813b25-19e5-4ab3-9415-14fc3d800aad req-e288aea5-1c65-4e02-bcb8-f7ed416a96d2 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.211314] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb2ea56-213f-49c8-8791-ce92484d9ebc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.225624] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11edfcd3-ab3d-4c32-be79-763be9aea146 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.229296] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695330, 'name': CreateVM_Task, 'duration_secs': 0.474239} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.229504] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1404.230325] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.230507] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.230828] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1404.231088] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8c08b87-70c2-4403-9516-0766c10aab4c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.258396] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c65e95-9471-4c81-8401-6663431dec8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.263197] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1404.263197] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520060d2-0420-c703-6a43-63e166421478" [ 1404.263197] env[62820]: _type = "Task" [ 1404.263197] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.273032] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9efce9-e8c2-4717-9241-7e55b77f5b5b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.281723] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520060d2-0420-c703-6a43-63e166421478, 'name': SearchDatastore_Task, 'duration_secs': 0.010878} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.282628] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.282879] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.283115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.283264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.283435] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.287362] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d28fdf73-560d-4d06-bba5-8b2b7e6d7ed1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.292465] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695331, 'name': ReconfigVM_Task, 'duration_secs': 0.515219} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.302305] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Reconfigured VM instance instance-0000001e to attach disk [datastore1] d040f935-566b-4bbe-b9f6-379fd1dc1a91/d040f935-566b-4bbe-b9f6-379fd1dc1a91.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.302305] env[62820]: DEBUG nova.compute.provider_tree [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1404.304559] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca57cdfe-943a-4e3f-8c18-653eaf3a576c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.310189] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.310413] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.312710] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a588ee06-ef79-46d1-aa51-204f9f488164 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.315311] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1404.315311] env[62820]: value = "task-1695332" [ 1404.315311] env[62820]: _type = "Task" [ 1404.315311] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.320780] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1404.320780] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c0da54-d058-cb0c-c860-178e2fd700fd" [ 1404.320780] env[62820]: _type = "Task" [ 1404.320780] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.327492] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695332, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.332742] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c0da54-d058-cb0c-c860-178e2fd700fd, 'name': SearchDatastore_Task, 'duration_secs': 0.008168} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.333544] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30fa5aac-9730-4568-8daf-344ad9b77894 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.338664] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1404.338664] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ae05c1-9200-fa28-60e0-ea0a723363f6" [ 1404.338664] env[62820]: _type = "Task" [ 1404.338664] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.346414] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ae05c1-9200-fa28-60e0-ea0a723363f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.410501] env[62820]: DEBUG nova.network.neutron [-] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.499858] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fa66958-5cb3-4422-a17f-7da3cdb5259d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.509575] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcba9d1-d274-4e0e-8290-6720354c5d78 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.542481] env[62820]: DEBUG nova.compute.manager [req-e2813b25-19e5-4ab3-9415-14fc3d800aad req-e288aea5-1c65-4e02-bcb8-f7ed416a96d2 service nova] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Detach interface failed, port_id=71bc98ce-e716-4517-ade6-5d17b8a032e9, reason: Instance 9287b8eb-487d-4f51-9e7c-90c016a1c8e2 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1404.794127] env[62820]: INFO nova.compute.manager [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Rebuilding instance [ 1404.846689] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695332, 'name': Rename_Task, 'duration_secs': 0.124999} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.849940] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.850263] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0aa10ebf-c11a-4f7f-b7bf-47d33add6069 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.861882] env[62820]: DEBUG nova.scheduler.client.report [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1404.862192] env[62820]: DEBUG nova.compute.provider_tree [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 52 to 53 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1404.862402] env[62820]: DEBUG nova.compute.provider_tree [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1404.879973] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: 
{'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ae05c1-9200-fa28-60e0-ea0a723363f6, 'name': SearchDatastore_Task, 'duration_secs': 0.008593} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.879973] env[62820]: DEBUG nova.compute.manager [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1404.879973] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.880393] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1/4ab0bb5c-259d-4419-9c7d-ed3086efdcb1.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1404.881199] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5366b7d2-5c9f-4932-84dd-1dc60e2012ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.887023] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1404.887023] env[62820]: value = "task-1695333" [ 1404.887023] env[62820]: _type = "Task" [ 1404.887023] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.887023] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4d90fb8-92ae-41e2-b6b9-c51c21503bba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.896473] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1404.896473] env[62820]: value = "task-1695334" [ 1404.896473] env[62820]: _type = "Task" [ 1404.896473] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.900775] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695333, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.908793] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.912654] env[62820]: INFO nova.compute.manager [-] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Took 1.66 seconds to deallocate network for instance. [ 1405.371550] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.852s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.371973] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1405.376533] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.407s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.378093] env[62820]: INFO nova.compute.claims [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1405.403294] env[62820]: DEBUG oslo_vmware.api [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695333, 'name': PowerOnVM_Task, 'duration_secs': 0.472375} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.410118] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.410118] env[62820]: INFO nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Took 4.49 seconds to spawn the instance on the hypervisor. 
[ 1405.410311] env[62820]: DEBUG nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.411900] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2545698-27ef-46dc-9259-82185bb1965a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.427029] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.427323] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48791} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.430939] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1/4ab0bb5c-259d-4419-9c7d-ed3086efdcb1.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1405.431192] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1405.434564] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8aa72289-e4e3-43da-9896-8b263109f2e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.442175] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1405.442175] env[62820]: value = "task-1695335" [ 1405.442175] env[62820]: _type = "Task" [ 1405.442175] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.452739] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695335, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.879299] env[62820]: DEBUG nova.compute.utils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1405.880948] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1405.883236] env[62820]: DEBUG nova.network.neutron [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1405.911543] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1405.912178] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7799dad8-f75a-49e6-b491-d9bdafb86762 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.921188] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1405.921188] env[62820]: value = "task-1695336" [ 1405.921188] env[62820]: _type = "Task" [ 1405.921188] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.933364] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695336, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.944156] env[62820]: DEBUG nova.policy [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1405.951838] env[62820]: INFO nova.compute.manager [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Took 41.40 seconds to build instance. [ 1405.956498] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.230464} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.956498] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1405.956948] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4cb663-9112-48f2-be58-a6739dc8a0ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.978360] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1/4ab0bb5c-259d-4419-9c7d-ed3086efdcb1.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1405.979249] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce30c0db-536e-499a-ae97-29d6d4a70bff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.003582] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1406.003582] env[62820]: value = "task-1695337" [ 1406.003582] env[62820]: _type = "Task" [ 1406.003582] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.013281] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695337, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.298833] env[62820]: DEBUG nova.network.neutron [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Successfully created port: e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1406.384246] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1406.438379] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695336, 'name': PowerOffVM_Task, 'duration_secs': 0.217614} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.438544] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1406.439027] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1406.439963] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e6df25-b822-4d26-9d42-b6e5d06f47bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.448416] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1406.450577] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32aa988f-df93-411f-9f91-e7b521d9909e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.458732] env[62820]: DEBUG oslo_concurrency.lockutils [None req-41342032-b4e9-420e-b48c-c630184e401a tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.967s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.475482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1406.475482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1406.475482] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Deleting the datastore file [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1406.475482] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70a2b51a-b99b-4fe2-aff2-efcb55446966 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.484486] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1406.484486] env[62820]: value = "task-1695339" [ 1406.484486] env[62820]: _type = "Task" [ 1406.484486] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.494106] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695339, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.515134] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695337, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.705632] env[62820]: DEBUG nova.compute.manager [None req-cad6e50d-0d41-4b76-8f8c-cd6cc4b49165 tempest-ServerDiagnosticsV248Test-819063366 tempest-ServerDiagnosticsV248Test-819063366-project-admin] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1406.706351] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2f506d-2cf5-4d75-a1f2-0bbd18d33748 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.720438] env[62820]: INFO nova.compute.manager [None req-cad6e50d-0d41-4b76-8f8c-cd6cc4b49165 tempest-ServerDiagnosticsV248Test-819063366 tempest-ServerDiagnosticsV248Test-819063366-project-admin] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Retrieving diagnostics [ 1406.722826] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3069ebab-d024-4374-96fb-a12dc7412b19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.976298] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da799ebd-01e8-4387-9a88-2e4ad97cd8e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.985287] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3badf61-9fdc-4704-8dfe-0cac0f42b64e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.027898] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695339, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088075} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.031757] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502eab6e-26d3-4287-b847-23786d44c1df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.033739] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1407.033932] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1407.034139] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1407.045820] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bb2b90-9d37-4d7a-a0b6-7ca679d6acd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.048647] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695337, 'name': ReconfigVM_Task, 'duration_secs': 0.990128} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.048647] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1/4ab0bb5c-259d-4419-9c7d-ed3086efdcb1.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1407.049387] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bf515fe-e112-463c-a53b-1f47b644c981 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.058673] env[62820]: DEBUG nova.compute.provider_tree [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.067814] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1407.067814] env[62820]: value = "task-1695340" [ 1407.067814] env[62820]: _type = "Task" [ 1407.067814] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.075976] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695340, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.405788] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1407.434427] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1407.435406] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1407.435406] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1407.435406] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1407.435406] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1407.435691] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1407.435691] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1407.435883] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1407.436353] 
env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1407.437112] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1407.437112] env[62820]: DEBUG nova.virt.hardware [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1407.437710] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7011ba8-f76c-4a6e-a423-f57a187662b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.446196] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f05b4ac-b898-497e-b043-dbf9e1d16462 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.563441] env[62820]: DEBUG nova.scheduler.client.report [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1407.580273] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695340, 'name': Rename_Task, 'duration_secs': 0.141488} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.580453] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.581397] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea81ea1e-b503-4ccf-b07b-949eac49b060 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.590286] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1407.590286] env[62820]: value = "task-1695341" [ 1407.590286] env[62820]: _type = "Task" [ 1407.590286] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.598177] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.783238] env[62820]: DEBUG nova.compute.manager [req-59e3513f-9b07-4460-a252-0a0b56d3bd50 req-88d49d14-7fce-4cf7-8b3a-d67549d2d4e8 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-vif-plugged-e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1407.783437] env[62820]: DEBUG oslo_concurrency.lockutils [req-59e3513f-9b07-4460-a252-0a0b56d3bd50 req-88d49d14-7fce-4cf7-8b3a-d67549d2d4e8 service nova] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.783663] env[62820]: DEBUG oslo_concurrency.lockutils [req-59e3513f-9b07-4460-a252-0a0b56d3bd50 req-88d49d14-7fce-4cf7-8b3a-d67549d2d4e8 service nova] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.783828] env[62820]: DEBUG oslo_concurrency.lockutils [req-59e3513f-9b07-4460-a252-0a0b56d3bd50 req-88d49d14-7fce-4cf7-8b3a-d67549d2d4e8 service nova] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.783998] env[62820]: DEBUG nova.compute.manager [req-59e3513f-9b07-4460-a252-0a0b56d3bd50 req-88d49d14-7fce-4cf7-8b3a-d67549d2d4e8 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] No waiting events found dispatching network-vif-plugged-e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1407.784338] env[62820]: WARNING
nova.compute.manager [req-59e3513f-9b07-4460-a252-0a0b56d3bd50 req-88d49d14-7fce-4cf7-8b3a-d67549d2d4e8 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received unexpected event network-vif-plugged-e5e97928-d469-42c4-9621-ed449eeebf5c for instance with vm_state building and task_state spawning. [ 1407.893566] env[62820]: DEBUG nova.network.neutron [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Successfully updated port: e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1408.072335] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1408.072696] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1408.072770] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1408.072970] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1408.073073] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1408.073192] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1408.073395] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1408.073719] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1408.073788] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1408.073905] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1408.074087] env[62820]: DEBUG nova.virt.hardware [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1408.075034] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.075281] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1408.078274] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314864d8-e15d-46fa-b0ad-e8bdb3a8cb22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.082047] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.247s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.082047] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.083445] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.499s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.084952] env[62820]: INFO nova.compute.claims [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.096136] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9134d005-226e-4d81-8f88-0b264eb804e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.106048] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695341, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.114055] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1408.119648] env[62820]: DEBUG oslo.service.loopingcall [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1408.120632] env[62820]: INFO nova.scheduler.client.report [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleted allocations for instance 93098210-ca91-41b4-9b12-96fa105a2ab3 [ 1408.122141] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1408.124284] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7928add3-2899-48a9-b79a-8f460cc33a15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.141502] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1408.141502] env[62820]: value = "task-1695342" [ 1408.141502] env[62820]: _type = "Task" [ 1408.141502] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.148994] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695342, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.396616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.396784] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.396943] env[62820]: DEBUG nova.network.neutron [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1408.583191] env[62820]: DEBUG nova.compute.utils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1408.584546] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1408.584715] env[62820]: DEBUG nova.network.neutron [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.604899] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695341, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.641381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9f09204b-77a1-478a-b864-b226eeb36f4e tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "93098210-ca91-41b4-9b12-96fa105a2ab3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 31.784s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.648878] env[62820]: DEBUG nova.policy [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8c2178bcc3840289572040f52c139a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70596f9fef8b41d5a570a8ca3c474700', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1408.654394] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695342, 'name': CreateVM_Task, 'duration_secs': 0.26283} completed successfully.
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.654558] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1408.655302] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.655894] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.656287] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1408.657628] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1858a365-a1ef-4c61-ae7b-f78521757fc2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.663864] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1408.663864] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5246e9de-7cfb-1aac-5e1c-9c46233d10cc" [ 1408.663864] env[62820]: _type = "Task" [ 1408.663864] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.674839] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5246e9de-7cfb-1aac-5e1c-9c46233d10cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.964686] env[62820]: DEBUG nova.network.neutron [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1409.016412] env[62820]: DEBUG nova.network.neutron [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Successfully created port: e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1409.091020] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1409.114244] env[62820]: DEBUG oslo_vmware.api [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695341, 'name': PowerOnVM_Task, 'duration_secs': 1.402964} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.114447] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1409.114593] env[62820]: INFO nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Took 5.56 seconds to spawn the instance on the hypervisor. [ 1409.114768] env[62820]: DEBUG nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1409.116611] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fa0846-ef03-4236-9f65-b5792b3a4675 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.174311] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5246e9de-7cfb-1aac-5e1c-9c46233d10cc, 'name': SearchDatastore_Task, 'duration_secs': 0.011067} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.174311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.174552] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1409.174753] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.174965] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.175175] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1409.175425] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f242b741-b866-41da-882e-0680cea1bade {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.189097] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1409.189211] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1409.189906] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9c083c0-1618-4aee-a76a-76a6ab29906f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.198465] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1409.198465] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5201cf61-e665-3c2b-a223-30082209f860" [ 1409.198465] env[62820]: _type = "Task" [ 1409.198465] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.207499] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5201cf61-e665-3c2b-a223-30082209f860, 'name': SearchDatastore_Task, 'duration_secs': 0.009333} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.210434] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b769e7ce-298b-4575-b8cc-6f07f1fd267b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.213689] env[62820]: DEBUG nova.network.neutron [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.217482] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1409.217482] env[62820]: value = 
"session[5263da33-e147-45e9-71e6-fd449b37f057]52b52788-810c-7592-d9a7-e87831c3a983" [ 1409.217482] env[62820]: _type = "Task" [ 1409.217482] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.227532] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b52788-810c-7592-d9a7-e87831c3a983, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.556299] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ef7af8-b474-4dce-9a4a-f3ac1bb939b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.563964] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69357e62-d99d-4e96-892a-d03f5155ffea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.601891] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ffe5fd-600f-4f5f-bf82-118b426a88e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.610731] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9081379d-5cdd-4c42-b924-81286429bf75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.624777] env[62820]: DEBUG nova.compute.provider_tree [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.644454] env[62820]: INFO nova.compute.manager [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Took 39.74 seconds to build instance. 
[ 1409.716183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.716547] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Instance network_info: |[{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1409.717037] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:4d:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5e97928-d469-42c4-9621-ed449eeebf5c', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1409.724508] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Creating folder: Project (8d4dc6b875b5420d87321f79b04bde9b). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1409.724836] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67df2fd5-b371-40cf-bb20-fe6a46157102 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.736387] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b52788-810c-7592-d9a7-e87831c3a983, 'name': SearchDatastore_Task, 'duration_secs': 0.008749} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.736639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.736914] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1409.738123] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e5d68c3-b363-4478-9eca-691db853c385 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.739892] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Created folder: Project (8d4dc6b875b5420d87321f79b04bde9b) in parent group-v353379. [ 1409.740079] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Creating folder: Instances. Parent ref: group-v353485. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1409.740294] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ab8d934-93e5-4f32-b628-1085f1d91b27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.746735] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1409.746735] env[62820]: value = "task-1695345" [ 1409.746735] env[62820]: _type = "Task" [ 1409.746735] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.750833] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Created folder: Instances in parent group-v353485. [ 1409.751067] env[62820]: DEBUG oslo.service.loopingcall [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1409.751551] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1409.751748] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf3f4604-ac77-4748-a64b-4a5c5d158b91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.770547] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.775158] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1409.775158] env[62820]: value = "task-1695346" [ 1409.775158] env[62820]: _type = "Task" [ 1409.775158] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.782531] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695346, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.811499] env[62820]: DEBUG nova.compute.manager [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-changed-e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1409.811699] env[62820]: DEBUG nova.compute.manager [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Refreshing instance network info cache due to event network-changed-e5e97928-d469-42c4-9621-ed449eeebf5c. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1409.811914] env[62820]: DEBUG oslo_concurrency.lockutils [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] Acquiring lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.812084] env[62820]: DEBUG oslo_concurrency.lockutils [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] Acquired lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.812239] env[62820]: DEBUG nova.network.neutron [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Refreshing network info cache for port e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.107298] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1410.128857] env[62820]: DEBUG nova.scheduler.client.report [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1410.143767] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cb57f999-2920-4a08-a0f8-a4bb504222b1 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.112s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.147583] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1410.149580] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1410.151213] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.151213] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1410.151213] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.151326] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1410.152020] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1410.152301] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1410.152553] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1410.152801] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
1410.153081] env[62820]: DEBUG nova.virt.hardware [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1410.154191] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0dda17-6f36-4cfa-b3b7-6ab1d8317375 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.165048] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f766ad-834f-4770-b1cc-f60fbdd401bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.260484] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695345, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.284981] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695346, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.641021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.641021] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1410.650217] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.020s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.651611] env[62820]: INFO nova.compute.claims [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.681362] env[62820]: DEBUG nova.network.neutron [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updated VIF entry in instance network info cache for port e5e97928-d469-42c4-9621-ed449eeebf5c. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.682448] env[62820]: DEBUG nova.network.neutron [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.758768] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695345, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744112} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.759121] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1410.759345] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1410.759602] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f83471d2-a30c-435c-b71d-39fe9d082219 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.766639] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1410.766639] env[62820]: value = "task-1695347" [ 1410.766639] env[62820]: _type = "Task" [ 1410.766639] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.775846] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695347, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.785094] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695346, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.819265] env[62820]: DEBUG nova.network.neutron [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Successfully updated port: e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1411.159742] env[62820]: DEBUG nova.compute.utils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1411.161223] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1411.161395] env[62820]: DEBUG nova.network.neutron [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1411.186281] env[62820]: DEBUG oslo_concurrency.lockutils [req-3982b44b-5fd0-4d1b-932c-6632244cd66f req-f3668747-f555-42a1-9d94-7379cfd4a754 service nova] Releasing lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.214884] env[62820]: DEBUG nova.compute.manager [None req-35bdc6f6-0ddd-4966-bc01-aa6962fac76b tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1411.216065] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fe76db-4cd9-48da-b4bd-a00226710808 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.233767] env[62820]: DEBUG nova.policy [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d43a716b15a4fb5a628b33b5ca8afe2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'3bfd16891a3f453da8583d65051a2afb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1411.285141] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695347, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068219} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.290109] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1411.291056] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be5fa1d-8b80-47fd-b7eb-61ad0c875db5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.299777] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695346, 'name': CreateVM_Task, 'duration_secs': 1.449736} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.307359] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1411.316292] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.317051] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.317220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.317522] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1411.318943] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70557383-b811-43ee-8af6-1d6cddb2bb44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.333635] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f71c37f-b85e-4bab-9c9b-78cf03739f0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.335668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "refresh_cache-06fb6034-e010-49bd-9e5e-7699a43dd5a9" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.335804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquired lock "refresh_cache-06fb6034-e010-49bd-9e5e-7699a43dd5a9" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.335953] env[62820]: DEBUG nova.network.neutron [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1411.342219] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1411.342219] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f81ee0-e424-dd18-edc7-846081d3b1d5" [ 1411.342219] env[62820]: _type = "Task" [ 1411.342219] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.346886] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1411.346886] env[62820]: value = "task-1695348" [ 1411.346886] env[62820]: _type = "Task" [ 1411.346886] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.355762] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f81ee0-e424-dd18-edc7-846081d3b1d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009809} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.356391] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.356627] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1411.358016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.358016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.358016] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1411.358016] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58a28221-4022-44ad-940b-6e7de177bfa1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.362537] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695348, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.370113] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1411.370113] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1411.370113] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7ba6a01-4eb1-4026-ae48-8e6c73755507 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.375680] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1411.375680] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b5f103-be3b-f464-6191-46a5233f0f21" [ 1411.375680] env[62820]: _type = "Task" [ 1411.375680] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.383485] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b5f103-be3b-f464-6191-46a5233f0f21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.448485] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.448865] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.592032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.592032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.592032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.592032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.592032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.592032] env[62820]: INFO nova.compute.manager [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Terminating instance [ 1411.671235] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1411.696537] env[62820]: DEBUG nova.network.neutron [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Successfully created port: 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.727180] env[62820]: INFO nova.compute.manager [None req-35bdc6f6-0ddd-4966-bc01-aa6962fac76b tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] instance snapshotting [ 1411.727935] env[62820]: DEBUG nova.objects.instance [None req-35bdc6f6-0ddd-4966-bc01-aa6962fac76b tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lazy-loading 'flavor' on Instance uuid 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1411.848800] env[62820]: DEBUG nova.compute.manager [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Received event network-vif-plugged-e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1411.849043] env[62820]: DEBUG oslo_concurrency.lockutils [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] Acquiring lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.849619] env[62820]: DEBUG oslo_concurrency.lockutils [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.849619] env[62820]: DEBUG oslo_concurrency.lockutils [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.849619] env[62820]: DEBUG nova.compute.manager [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] No waiting events found dispatching network-vif-plugged-e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1411.849864] env[62820]: WARNING nova.compute.manager [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Received unexpected event network-vif-plugged-e926a317-6604-4398-a77f-420aaa352075 for instance with vm_state building and task_state spawning. [ 1411.849864] env[62820]: DEBUG nova.compute.manager [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Received event network-changed-e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1411.850062] env[62820]: DEBUG nova.compute.manager [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Refreshing instance network info cache due to event network-changed-e926a317-6604-4398-a77f-420aaa352075. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1411.850299] env[62820]: DEBUG oslo_concurrency.lockutils [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] Acquiring lock "refresh_cache-06fb6034-e010-49bd-9e5e-7699a43dd5a9" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.865862] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695348, 'name': ReconfigVM_Task, 'duration_secs': 0.328605} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.865862] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718/766dd26e-3866-4ef3-bd87-b81e5f6bc718.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1411.865862] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29dc8600-4618-4dc3-913b-3e2e357e98fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.877164] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1411.877164] env[62820]: value = "task-1695349" [ 1411.877164] env[62820]: _type = "Task" [ 1411.877164] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.891122] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b5f103-be3b-f464-6191-46a5233f0f21, 'name': SearchDatastore_Task, 'duration_secs': 0.009889} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.898698] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695349, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.899486] env[62820]: DEBUG nova.network.neutron [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.905028] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a382fdbe-5525-4b60-8a75-89595d521a3d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.911860] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1411.911860] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52055b33-399c-7c84-0739-a0f55052bb19" [ 1411.911860] env[62820]: _type = "Task" [ 1411.911860] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.923757] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52055b33-399c-7c84-0739-a0f55052bb19, 'name': SearchDatastore_Task, 'duration_secs': 0.009187} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.927275] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.927640] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c/bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1411.933039] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dffcdcb0-834e-4969-b056-c3fa456fefd0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.943606] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1411.943606] env[62820]: value = "task-1695350" [ 1411.943606] env[62820]: _type = "Task" [ 1411.943606] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.953649] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1411.957008] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695350, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.097733] env[62820]: DEBUG nova.network.neutron [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Updating instance_info_cache with network_info: [{"id": "e926a317-6604-4398-a77f-420aaa352075", "address": "fa:16:3e:45:94:ee", "network": {"id": "c36362ee-43d0-4ef2-9606-9edd610b797e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-171616310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70596f9fef8b41d5a570a8ca3c474700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape926a317-66", "ovs_interfaceid": "e926a317-6604-4398-a77f-420aaa352075", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.099548] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "refresh_cache-4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.099870] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquired lock "refresh_cache-4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.100203] env[62820]: DEBUG nova.network.neutron [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.187475] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0f2d27-dc80-4c52-9930-9953cf10a1be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.196090] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ada4bfa-4d0d-461a-8023-3b3e06215667 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.234996] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f72ad9c-e6f7-4bce-857b-3ac9f4c93565 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.238701] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef5d28d-d1b2-4351-abf1-82930df082d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.248268] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19a1124-c016-4b8d-9e53-edbde62466f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.264762] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc163d61-5b9d-4d83-a918-f24ba07a479f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.275979] env[62820]: DEBUG nova.compute.provider_tree [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1412.392634] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695349, 'name': Rename_Task, 'duration_secs': 0.140489} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.392986] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1412.393264] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d7afd50-1ec9-4c21-9fd5-7b3a6ce4809a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.400021] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Waiting for the task: (returnval){ [ 1412.400021] env[62820]: value = "task-1695351" [ 1412.400021] env[62820]: _type = "Task" [ 1412.400021] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.408993] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695351, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.455524] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481786} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.455871] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c/bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1412.456099] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1412.456357] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a1e22f0-cca3-4d18-871b-8f8631ef0f34 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.463643] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1412.463643] env[62820]: value = "task-1695352" [ 1412.463643] env[62820]: _type = "Task" [ 1412.463643] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.473811] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695352, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.478803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.603114] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Releasing lock "refresh_cache-06fb6034-e010-49bd-9e5e-7699a43dd5a9" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.603476] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Instance network_info: |[{"id": "e926a317-6604-4398-a77f-420aaa352075", "address": "fa:16:3e:45:94:ee", "network": {"id": "c36362ee-43d0-4ef2-9606-9edd610b797e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-171616310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70596f9fef8b41d5a570a8ca3c474700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape926a317-66", "ovs_interfaceid": "e926a317-6604-4398-a77f-420aaa352075", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1412.605788] env[62820]: DEBUG oslo_concurrency.lockutils [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] Acquired lock "refresh_cache-06fb6034-e010-49bd-9e5e-7699a43dd5a9" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.605992] env[62820]: DEBUG nova.network.neutron [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Refreshing network info cache for port e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1412.608069] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:94:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'4adc8ed0-d11a-4510-9be0-b27c0da3a903', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e926a317-6604-4398-a77f-420aaa352075', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1412.617106] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Creating folder: Project (70596f9fef8b41d5a570a8ca3c474700). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1412.618284] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f2a98fa-744e-496c-99f6-4e76b66e9843 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.631052] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Created folder: Project (70596f9fef8b41d5a570a8ca3c474700) in parent group-v353379. [ 1412.631154] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Creating folder: Instances. Parent ref: group-v353488. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1412.631408] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f6d07e3-82b9-40be-bbe1-591ddb8a25ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.641489] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Created folder: Instances in parent group-v353488. [ 1412.641727] env[62820]: DEBUG oslo.service.loopingcall [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.641947] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.642196] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c905bee-53a5-49bd-b568-ef2856d9dd85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.659056] env[62820]: DEBUG nova.network.neutron [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1412.666166] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.666166] env[62820]: value = "task-1695355" [ 1412.666166] env[62820]: _type = "Task" [ 1412.666166] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.674056] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695355, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.679298] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1412.708555] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.708804] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.708963] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.709163] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.709308] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.709464] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.709681] env[62820]: DEBUG nova.virt.hardware [None 
req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.709840] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.710013] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.710640] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.710838] env[62820]: DEBUG nova.virt.hardware [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.711713] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc613dc-778a-4bf1-b79d-f376f5f5ceab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.723779] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6932806-3153-459e-a389-3205bc1acc6c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.738076] env[62820]: DEBUG nova.network.neutron [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.789853] env[62820]: DEBUG nova.compute.manager [None req-35bdc6f6-0ddd-4966-bc01-aa6962fac76b tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Instance disappeared during snapshot {{(pid=62820) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1412.797016] env[62820]: ERROR nova.scheduler.client.report [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [req-a9151402-cfe6-40f5-8ce8-1701a5ece46a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a9151402-cfe6-40f5-8ce8-1701a5ece46a"}]} [ 1412.812633] env[62820]: DEBUG nova.scheduler.client.report [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1412.826396] env[62820]: DEBUG nova.scheduler.client.report [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1412.826627] env[62820]: DEBUG nova.compute.provider_tree [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1412.838242] env[62820]: DEBUG nova.scheduler.client.report [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1412.858516] env[62820]: DEBUG nova.scheduler.client.report [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1412.909827] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695351, 'name': PowerOnVM_Task} 
progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.938897] env[62820]: DEBUG nova.compute.manager [None req-35bdc6f6-0ddd-4966-bc01-aa6962fac76b tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Found 0 images (rotation: 2) {{(pid=62820) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4898}} [ 1412.983721] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097826} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.983721] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1412.984593] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b853dc-1ae1-4e3e-9999-ddfc507ca159 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.011330] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c/bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.014212] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d8bbc28-b112-4c57-965e-8a4d8c7eea7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.039112] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1413.039112] env[62820]: value = "task-1695356" [ 1413.039112] env[62820]: _type = "Task" [ 1413.039112] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.040430] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.040681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.059189] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695356, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.176667] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695355, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.240809] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Releasing lock "refresh_cache-4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.241247] env[62820]: DEBUG nova.compute.manager [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1413.241840] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1413.244961] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36fb299-cec4-454e-bcea-a755792d6fc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.253321] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1413.254348] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfcf767d-2f59-4e29-bddd-a218802a327f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.260464] env[62820]: DEBUG oslo_vmware.api [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1413.260464] env[62820]: value = "task-1695357" [ 1413.260464] env[62820]: _type = "Task" [ 1413.260464] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.273702] env[62820]: DEBUG oslo_vmware.api [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695357, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.358252] env[62820]: DEBUG nova.network.neutron [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Updated VIF entry in instance network info cache for port e926a317-6604-4398-a77f-420aaa352075. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1413.358252] env[62820]: DEBUG nova.network.neutron [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Updating instance_info_cache with network_info: [{"id": "e926a317-6604-4398-a77f-420aaa352075", "address": "fa:16:3e:45:94:ee", "network": {"id": "c36362ee-43d0-4ef2-9606-9edd610b797e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-171616310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70596f9fef8b41d5a570a8ca3c474700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape926a317-66", "ovs_interfaceid": "e926a317-6604-4398-a77f-420aaa352075", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.362540] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb09f51-c1ed-4015-bc05-62096ef0a347 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.369886] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1a66f1-d46b-4902-9989-c6fd3606d631 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.404982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f968733-7f92-45c8-89b2-fa9d4995b6a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.415389] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1e0e2f-1de8-45d5-9356-759a64456b7a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.419214] env[62820]: DEBUG oslo_vmware.api [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Task: {'id': task-1695351, 'name': PowerOnVM_Task, 'duration_secs': 0.747417} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.419744] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.419962] env[62820]: DEBUG nova.compute.manager [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1413.420970] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c050e96-d0b9-4b86-94c4-09da5374b2eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.430969] env[62820]: DEBUG nova.compute.provider_tree [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1413.551809] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695356, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.553859] env[62820]: DEBUG nova.compute.utils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1413.676453] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695355, 'name': CreateVM_Task, 'duration_secs': 0.712997} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.676836] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1413.677361] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.677530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.677870] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1413.678141] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b9b0ab-be04-444d-913d-73599da4f02d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.682784] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1413.682784] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c7852b-0268-3282-a00e-b7c534e36b7b" [ 1413.682784] env[62820]: _type = "Task" [ 1413.682784] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.692554] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c7852b-0268-3282-a00e-b7c534e36b7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.772311] env[62820]: DEBUG oslo_vmware.api [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695357, 'name': PowerOffVM_Task, 'duration_secs': 0.184748} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.772556] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.772718] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1413.772968] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abc77494-f9ed-473e-bf4b-8849c72b3cc5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.793415] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1413.793588] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1413.793765] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Deleting the datastore file [datastore1] 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1413.793992] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97d71bcb-7bdc-4d59-b509-27ad919d2e13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.801193] env[62820]: DEBUG oslo_vmware.api [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for the task: (returnval){ [ 1413.801193] env[62820]: value = "task-1695359" [ 1413.801193] env[62820]: _type = "Task" [ 1413.801193] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.808889] env[62820]: DEBUG oslo_vmware.api [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695359, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.852727] env[62820]: DEBUG nova.network.neutron [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Successfully updated port: 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.861095] env[62820]: DEBUG oslo_concurrency.lockutils [req-b2df022b-a671-4c6c-9072-cb3dc14c22ee req-9efeba09-4799-4cc1-b040-aece978b1a2b service nova] Releasing lock "refresh_cache-06fb6034-e010-49bd-9e5e-7699a43dd5a9" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.947574] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.977502] env[62820]: DEBUG nova.scheduler.client.report [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 54 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1413.977798] env[62820]: DEBUG nova.compute.provider_tree [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 54 to 55 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1413.978016] env[62820]: DEBUG nova.compute.provider_tree [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1414.053049] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695356, 'name': ReconfigVM_Task, 'duration_secs': 0.557737} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.053380] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Reconfigured VM instance instance-00000020 to attach disk [datastore1] bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c/bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1414.053973] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a96dc7c6-c6eb-46dc-b62b-257372f584e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.056185] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.060479] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1414.060479] env[62820]: value = "task-1695360" [ 1414.060479] env[62820]: _type = "Task" [ 1414.060479] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.066785] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695360, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.195028] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c7852b-0268-3282-a00e-b7c534e36b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.00972} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.195028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.195028] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1414.195028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.195028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.195028] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1414.195028] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-606d7870-c542-4403-97b4-933193d375ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.203286] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1414.203669] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1414.204548] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96199b67-eff5-4700-97da-95f70fd87af2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.210160] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1414.210160] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527fabc3-1dde-b382-46c4-f1e302222f14" [ 1414.210160] env[62820]: _type = "Task" [ 1414.210160] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.218124] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527fabc3-1dde-b382-46c4-f1e302222f14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.311132] env[62820]: DEBUG oslo_vmware.api [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Task: {'id': task-1695359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09636} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.312023] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1414.312358] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1414.312675] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1414.312986] env[62820]: INFO nova.compute.manager [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1414.313386] env[62820]: DEBUG oslo.service.loopingcall [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.313736] env[62820]: DEBUG nova.compute.manager [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1414.313953] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1414.346089] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.355752] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.355752] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.355752] env[62820]: DEBUG nova.network.neutron [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.485569] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.837s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.486308] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1414.489034] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.724s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.492048] env[62820]: INFO nova.compute.claims [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1414.503264] env[62820]: DEBUG nova.compute.manager [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-vif-plugged-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1414.503264] env[62820]: DEBUG oslo_concurrency.lockutils [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] Acquiring lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.503264] env[62820]: DEBUG oslo_concurrency.lockutils [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.503264] env[62820]: DEBUG oslo_concurrency.lockutils [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.503264] env[62820]: DEBUG nova.compute.manager [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] No waiting events found dispatching network-vif-plugged-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1414.503264] env[62820]: WARNING nova.compute.manager [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received unexpected event network-vif-plugged-927b7951-0ef5-4aa5-b888-5b73266b6951 for instance with vm_state building and task_state spawning. 
[ 1414.503264] env[62820]: DEBUG nova.compute.manager [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1414.503838] env[62820]: DEBUG nova.compute.manager [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing instance network info cache due to event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1414.503838] env[62820]: DEBUG oslo_concurrency.lockutils [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] Acquiring lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.570205] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695360, 'name': Rename_Task, 'duration_secs': 0.186574} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.570622] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1414.570907] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8df96bb-77ef-4272-b633-f8c25a702870 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.577820] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1414.577820] env[62820]: value = "task-1695361" [ 1414.577820] env[62820]: _type = "Task" [ 1414.577820] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.586591] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.726216] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527fabc3-1dde-b382-46c4-f1e302222f14, 'name': SearchDatastore_Task, 'duration_secs': 0.008718} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.727063] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da219fce-7b46-4048-b04c-18dceed48b12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.736223] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1414.736223] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52dd5463-9468-641e-cf1d-09e06142c3de" [ 1414.736223] env[62820]: _type = "Task" [ 1414.736223] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.749311] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52dd5463-9468-641e-cf1d-09e06142c3de, 'name': SearchDatastore_Task, 'duration_secs': 0.009639} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.749599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.749844] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 06fb6034-e010-49bd-9e5e-7699a43dd5a9/06fb6034-e010-49bd-9e5e-7699a43dd5a9.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.750134] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e42199c6-6f29-4c72-a861-2c1bdfa4b5af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.757567] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1414.757567] env[62820]: value = "task-1695362" [ 1414.757567] env[62820]: _type = "Task" [ 1414.757567] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.769776] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695362, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.847332] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.908317] env[62820]: DEBUG nova.network.neutron [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.998902] env[62820]: DEBUG nova.compute.utils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1415.003467] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1415.005892] env[62820]: DEBUG nova.network.neutron [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1415.091833] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695361, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.098967] env[62820]: DEBUG nova.policy [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2a98cf26a4949abadead50c7354a638', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04698d19505d400594ce250863e15456', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1415.149916] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.150213] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.150463] env[62820]: INFO nova.compute.manager [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Attaching volume deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7 to /dev/sdb [ 1415.191643] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b51f806-7e3d-4488-b8c5-539b9f8a5228 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.201570] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab51e4aa-11de-443a-ab63-2a105454d5b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.215738] env[62820]: DEBUG nova.virt.block_device [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updating existing volume attachment record: 7b291d67-1a33-4a6f-8a8d-429ea5210898 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1415.228452] env[62820]: DEBUG nova.network.neutron [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": 
"tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.273198] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467339} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.273508] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 06fb6034-e010-49bd-9e5e-7699a43dd5a9/06fb6034-e010-49bd-9e5e-7699a43dd5a9.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1415.273801] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1415.274142] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-886e2ddb-6912-4f99-902d-ecf7b2892c3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.283222] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1415.283222] env[62820]: value = "task-1695363" [ 1415.283222] env[62820]: _type = "Task" [ 1415.283222] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.293690] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695363, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.349988] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.350439] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.350439] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.354337] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.004s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.354528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.356381] env[62820]: INFO nova.compute.manager [-] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Took 1.04 seconds to deallocate network for instance. [ 1415.356894] env[62820]: INFO nova.compute.manager [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Terminating instance [ 1415.480375] env[62820]: DEBUG nova.network.neutron [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Successfully created port: 5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1415.507766] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1415.591669] env[62820]: DEBUG oslo_vmware.api [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695361, 'name': PowerOnVM_Task, 'duration_secs': 0.549912} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.592645] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1415.592645] env[62820]: INFO nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Took 8.19 seconds to spawn the instance on the hypervisor. [ 1415.592645] env[62820]: DEBUG nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1415.593299] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd8b3a7-0314-4a5d-8c58-0987625384f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.733976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.734341] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Instance network_info: |[{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1415.734972] env[62820]: DEBUG oslo_concurrency.lockutils [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] Acquired lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.734972] env[62820]: DEBUG nova.network.neutron [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.736079] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:db:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '927b7951-0ef5-4aa5-b888-5b73266b6951', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.743998] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Creating folder: Project (3bfd16891a3f453da8583d65051a2afb). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.748878] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-102aa64a-bd47-4322-9d04-cdb23878dcc4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.761460] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Created folder: Project (3bfd16891a3f453da8583d65051a2afb) in parent group-v353379. [ 1415.763923] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Creating folder: Instances. Parent ref: group-v353494. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.763923] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffb11d6f-8ff3-4be6-954c-afaed4850f8a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.776897] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Created folder: Instances in parent group-v353494. 
[ 1415.776897] env[62820]: DEBUG oslo.service.loopingcall [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.776897] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.776897] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fdbbd87-d1a3-4785-b730-8b15a146871d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.810207] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174949} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.811985] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.812337] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.812337] env[62820]: value = "task-1695369" [ 1415.812337] env[62820]: _type = "Task" [ 1415.812337] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.813167] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e908e83e-61af-4e2d-83f1-c4cc8288a0e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.847826] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 06fb6034-e010-49bd-9e5e-7699a43dd5a9/06fb6034-e010-49bd-9e5e-7699a43dd5a9.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.855334] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9da7e32d-64c7-44ce-9d9d-4cc9e68425fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.869602] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695369, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.870471] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "refresh_cache-766dd26e-3866-4ef3-bd87-b81e5f6bc718" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.870581] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquired lock "refresh_cache-766dd26e-3866-4ef3-bd87-b81e5f6bc718" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.870759] env[62820]: DEBUG nova.network.neutron [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1415.874104] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.877057] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1415.877057] env[62820]: value = "task-1695370" [ 1415.877057] env[62820]: _type = "Task" [ 1415.877057] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.886707] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695370, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.119406] env[62820]: INFO nova.compute.manager [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Took 43.37 seconds to build instance. 
[ 1416.132040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a558472-1844-4fd1-949e-e429b62ca2ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.141267] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5995e4-5438-482f-89f9-a515fb247b4b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.172781] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5227d8ca-0ab7-49b0-b7a7-838d5c16e0f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.183219] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba4e72f-3758-40e0-90ff-e03f128fa0b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.194370] env[62820]: DEBUG nova.compute.provider_tree [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.326961] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695369, 'name': CreateVM_Task, 'duration_secs': 0.366406} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.327163] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1416.327850] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.328053] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.328434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1416.328679] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-052a76c0-6a89-46e6-a908-b48e269ccefe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.333902] env[62820]: DEBUG oslo_vmware.api [None 
req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1416.333902] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5259d831-c34b-f2e0-7a2c-2b835bdfa596" [ 1416.333902] env[62820]: _type = "Task" [ 1416.333902] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.341984] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5259d831-c34b-f2e0-7a2c-2b835bdfa596, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.385948] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.396726] env[62820]: DEBUG nova.network.neutron [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1416.518830] env[62820]: DEBUG nova.network.neutron [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.520804] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='72727140-45c5-4368-9f13-c12a8d0ec9dc',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2056238791',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1416.549334] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1416.549808] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1416.549808] env[62820]: 
DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1416.549924] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1416.550104] env[62820]: DEBUG nova.virt.hardware [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1416.551133] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0fb36b-f4ad-465f-986f-86f3e52bfc4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.560700] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61f7591-63f8-4bf2-bb9b-80a34ef2477d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.606020] env[62820]: DEBUG nova.network.neutron [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updated VIF entry in instance network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.606406] env[62820]: DEBUG nova.network.neutron [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.622443] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6bc377ea-c746-4e01-837a-5ddb2cf0928a tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.972s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.697287] env[62820]: DEBUG nova.scheduler.client.report [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1416.700563] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "1926c780-faea-40d8-a00b-6ad576349a68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.700855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock 
"1926c780-faea-40d8-a00b-6ad576349a68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.849719] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5259d831-c34b-f2e0-7a2c-2b835bdfa596, 'name': SearchDatastore_Task, 'duration_secs': 0.019543} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.849719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.849719] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1416.849719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.849719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.849719] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1416.849719] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-226ef042-b393-4812-8722-e7dcfe0b94ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.858327] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1416.858551] env[62820]: DEBUG nova.virt.vmwareapi.vmops 
[None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1416.859284] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7409d23e-8dda-489f-9541-c59758b02a7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.865685] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1416.865685] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e15e87-a1fd-55ef-b9dc-650ee8f117cd" [ 1416.865685] env[62820]: _type = "Task" [ 1416.865685] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.874445] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e15e87-a1fd-55ef-b9dc-650ee8f117cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.885208] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695370, 'name': ReconfigVM_Task, 'duration_secs': 0.884142} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.885467] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 06fb6034-e010-49bd-9e5e-7699a43dd5a9/06fb6034-e010-49bd-9e5e-7699a43dd5a9.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1416.886250] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1926e96d-e8fd-4096-93f1-8adfff185643 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.891619] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1416.891619] env[62820]: value = "task-1695371" [ 1416.891619] env[62820]: _type = "Task" [ 1416.891619] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.903421] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695371, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.024060] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Releasing lock "refresh_cache-766dd26e-3866-4ef3-bd87-b81e5f6bc718" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.024547] env[62820]: DEBUG nova.compute.manager [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1417.024747] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1417.025633] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1295ac-e990-4445-870b-90e7464ffa52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.033454] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1417.033759] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a7aec7c-b1f9-4daa-ac9c-335014510da0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.040056] env[62820]: DEBUG oslo_vmware.api [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1417.040056] env[62820]: value = "task-1695372" [ 1417.040056] env[62820]: _type = "Task" [ 1417.040056] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.048266] env[62820]: DEBUG oslo_vmware.api [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695372, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.109091] env[62820]: DEBUG oslo_concurrency.lockutils [req-cd3a3529-16ce-4680-b16b-65465b46d48c req-d136e5f9-80f7-4fb6-8250-ee7a621391b6 service nova] Releasing lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.203833] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.204654] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1417.207520] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1417.209797] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.209989] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.214627] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.422s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.214627] env[62820]: INFO nova.compute.claims [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1417.218281] env[62820]: DEBUG nova.compute.manager [req-ae2d1197-2040-49e9-a376-ce4ff61592ac req-bf7eaa2b-4033-4fa9-a4b5-7ae441514233 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Received event network-vif-plugged-5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11489}} [ 1417.225057] env[62820]: DEBUG oslo_concurrency.lockutils [req-ae2d1197-2040-49e9-a376-ce4ff61592ac req-bf7eaa2b-4033-4fa9-a4b5-7ae441514233 service nova] Acquiring lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.225057] env[62820]: DEBUG oslo_concurrency.lockutils [req-ae2d1197-2040-49e9-a376-ce4ff61592ac req-bf7eaa2b-4033-4fa9-a4b5-7ae441514233 service nova] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.225057] env[62820]: DEBUG oslo_concurrency.lockutils [req-ae2d1197-2040-49e9-a376-ce4ff61592ac req-bf7eaa2b-4033-4fa9-a4b5-7ae441514233 service nova] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.225057] env[62820]: DEBUG nova.compute.manager [req-ae2d1197-2040-49e9-a376-ce4ff61592ac req-bf7eaa2b-4033-4fa9-a4b5-7ae441514233 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] No waiting events found dispatching network-vif-plugged-5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1417.225057] env[62820]: WARNING nova.compute.manager [req-ae2d1197-2040-49e9-a376-ce4ff61592ac req-bf7eaa2b-4033-4fa9-a4b5-7ae441514233 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Received unexpected event network-vif-plugged-5af0a5c5-a176-477e-b59a-fa82e9eea9a7 for instance with vm_state building and task_state spawning. 
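The records above show the external-event path in compact form: the service-nova context (req-ae2d1197) receives network-vif-plugged-5af0a5c5-a176-477e-b59a-fa82e9eea9a7 for instance ab21fd61, takes and releases the "ab21fd61-...-events" lock with zero wait and zero hold time, finds "No waiting events found" for that event, and logs the WARNING about an unexpected event while the instance is still in vm_state building / task_state spawning. The sketch below is a minimal, self-contained illustration of that register-then-pop pattern; the class, method, and lock are simplified stand-ins (plain threading primitives, not oslo_concurrency, and not nova's actual InstanceEvents implementation).

import threading
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("sketch.instance_events")


class InstanceEventsSketch:
    """Simplified stand-in for per-instance external-event bookkeeping."""

    def __init__(self):
        # instance_uuid -> {event_name: threading.Event waited on by the spawner}
        self._waiters = {}
        self._lock = threading.Lock()  # plays the role of the "<uuid>-events" lock

    def prepare_for_event(self, instance_uuid, event_name):
        """Register a waiter before the event is expected to arrive."""
        waiter = threading.Event()
        with self._lock:
            self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Dispatch an incoming event to its waiter, if one was registered."""
        with self._lock:  # "acquired ... waited 0.000s" / "released ... held 0.000s"
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return None
        LOG.debug("Dispatching %s to waiter", event_name)
        waiter.set()
        return waiter


if __name__ == "__main__":
    events = InstanceEventsSketch()
    # No waiter registered, mirroring the WARNING path in the log.
    events.pop_instance_event(
        "ab21fd61-3a44-42fa-92be-51214b0a9a1e",
        "network-vif-plugged-5af0a5c5-a176-477e-b59a-fa82e9eea9a7")

In the trace itself the warning is benign: a few records later (1417.395514) the MigrationsAdminTest request logs "Successfully updated port: 5af0a5c5-...", refreshes the instance network info cache, and the spawn proceeds with that VIF in the cached network_info.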
[ 1417.259740] env[62820]: INFO nova.scheduler.client.report [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Deleted allocations for instance 846e8df9-b925-4d2e-a90e-4e774c35d0b4 [ 1417.333118] env[62820]: DEBUG nova.compute.manager [None req-aa3391d6-7192-4bcb-a024-3a55d1ac87ab tempest-ServerDiagnosticsV248Test-819063366 tempest-ServerDiagnosticsV248Test-819063366-project-admin] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1417.334981] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5911d130-8aeb-41ad-ad93-e3b85b8b836e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.343792] env[62820]: INFO nova.compute.manager [None req-aa3391d6-7192-4bcb-a024-3a55d1ac87ab tempest-ServerDiagnosticsV248Test-819063366 tempest-ServerDiagnosticsV248Test-819063366-project-admin] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Retrieving diagnostics [ 1417.344990] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be3277e-9bb7-4866-b3d0-f9690b483634 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.385540] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e15e87-a1fd-55ef-b9dc-650ee8f117cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008355} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.386467] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d7cb6ca-c17c-48dc-9fb2-39601176351b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.392406] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1417.392406] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ca8292-8c64-f88b-f25d-17c7fcc09602" [ 1417.392406] env[62820]: _type = "Task" [ 1417.392406] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.395514] env[62820]: DEBUG nova.network.neutron [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Successfully updated port: 5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1417.407778] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695371, 'name': Rename_Task, 'duration_secs': 0.397165} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.411777] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1417.412276] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ca8292-8c64-f88b-f25d-17c7fcc09602, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.413026] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98602007-f9b6-422f-b263-f00c0790713d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.419103] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1417.419103] env[62820]: value = "task-1695373" [ 1417.419103] env[62820]: _type = "Task" [ 1417.419103] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.427689] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.550753] env[62820]: DEBUG oslo_vmware.api [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695372, 'name': PowerOffVM_Task, 'duration_secs': 0.266123} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.551119] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1417.551364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1417.551771] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c5e9a6d-a003-4a67-80f4-60d9c8bb91c4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.577677] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1417.578146] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1417.578479] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Deleting the datastore file [datastore1] 766dd26e-3866-4ef3-bd87-b81e5f6bc718 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1417.578857] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efd2fefe-e1cb-4ca3-a8c7-e0304001e11e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.585483] env[62820]: DEBUG oslo_vmware.api [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for the task: (returnval){ [ 1417.585483] env[62820]: value = "task-1695376" [ 1417.585483] env[62820]: _type = "Task" [ 1417.585483] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.593887] env[62820]: DEBUG oslo_vmware.api [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695376, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.723737] env[62820]: DEBUG nova.compute.utils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1417.727684] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1417.727684] env[62820]: DEBUG nova.network.neutron [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1417.751172] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.768351] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20ce1cb0-53a2-46cc-9d81-a515b21145ca tempest-ImagesOneServerTestJSON-1080800536 tempest-ImagesOneServerTestJSON-1080800536-project-member] Lock "846e8df9-b925-4d2e-a90e-4e774c35d0b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.831s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.794458] env[62820]: DEBUG nova.policy [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1774425893284b41bc5168166f88794d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c558a3424c74ab2bface054c43cf8b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1417.907921] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.908127] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.908354] 
env[62820]: DEBUG nova.network.neutron [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.909527] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ca8292-8c64-f88b-f25d-17c7fcc09602, 'name': SearchDatastore_Task, 'duration_secs': 0.015101} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.910037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.910301] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] aa98dbb0-5ff7-4da5-a365-2b55a8bd2216/aa98dbb0-5ff7-4da5-a365-2b55a8bd2216.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1417.912319] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40c8a082-24e6-4d6f-b599-f3914ad09116 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.920413] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1417.920413] env[62820]: value = "task-1695377" [ 1417.920413] env[62820]: _type = "Task" [ 1417.920413] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.938107] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695373, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.942512] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695377, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.097411] env[62820]: DEBUG oslo_vmware.api [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Task: {'id': task-1695376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095652} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.097697] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1418.097885] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1418.098079] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1418.098257] env[62820]: INFO nova.compute.manager [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1418.098508] env[62820]: DEBUG oslo.service.loopingcall [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.098699] env[62820]: DEBUG nova.compute.manager [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1418.098799] env[62820]: DEBUG nova.network.neutron [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1418.118924] env[62820]: DEBUG nova.network.neutron [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.232994] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1418.435266] env[62820]: DEBUG oslo_vmware.api [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695373, 'name': PowerOnVM_Task, 'duration_secs': 0.55606} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.436329] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1418.436720] env[62820]: INFO nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Took 8.33 seconds to spawn the instance on the hypervisor. [ 1418.437191] env[62820]: DEBUG nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1418.442334] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53418577-5b04-40bf-8407-78c460dc9e47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.444897] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695377, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.469097] env[62820]: DEBUG nova.network.neutron [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.505316] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.505582] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.505836] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.506031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.506206] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.509273] env[62820]: INFO nova.compute.manager [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Terminating instance [ 1418.621254] env[62820]: DEBUG nova.network.neutron [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.687861] env[62820]: DEBUG nova.network.neutron [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.725071] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a8f867-2187-44f6-a2bd-0eba2d401028 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.734433] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e84db57-062c-4809-ad3f-d8e455c8b200 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.775447] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcdebf6-8647-4ce5-bb54-5d176962f67b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.783713] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57574c0d-30ce-4a29-a3eb-4ef7565ed64a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.799322] env[62820]: DEBUG nova.compute.provider_tree [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.801829] env[62820]: DEBUG nova.network.neutron [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Successfully created port: 1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1418.939441] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.909961} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.939710] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] aa98dbb0-5ff7-4da5-a365-2b55a8bd2216/aa98dbb0-5ff7-4da5-a365-2b55a8bd2216.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.939921] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.940184] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-642031be-13fe-42fa-a691-4f0a8801bfc5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.946265] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1418.946265] env[62820]: value = "task-1695378" [ 1418.946265] env[62820]: _type = "Task" [ 1418.946265] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.962130] env[62820]: INFO nova.compute.manager [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Took 42.03 seconds to build instance. [ 1418.966394] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695378, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.017231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "refresh_cache-d040f935-566b-4bbe-b9f6-379fd1dc1a91" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.017443] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquired lock "refresh_cache-d040f935-566b-4bbe-b9f6-379fd1dc1a91" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.017600] env[62820]: DEBUG nova.network.neutron [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1419.123912] env[62820]: INFO nova.compute.manager [-] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Took 1.02 seconds to deallocate network for instance. [ 1419.190905] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.191245] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Instance network_info: |[{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1419.191670] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:22:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5af0a5c5-a176-477e-b59a-fa82e9eea9a7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1419.203547] env[62820]: DEBUG oslo.service.loopingcall [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.204674] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1419.204937] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f8c1bb9-2f2d-4d06-be9a-2d706c4fb2c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.232640] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1419.232640] env[62820]: value = "task-1695379" [ 1419.232640] env[62820]: _type = "Task" [ 1419.232640] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.243192] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695379, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.245953] env[62820]: DEBUG nova.compute.manager [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-changed-e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1419.246148] env[62820]: DEBUG nova.compute.manager [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Refreshing instance network info cache due to event network-changed-e5e97928-d469-42c4-9621-ed449eeebf5c. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1419.246362] env[62820]: DEBUG oslo_concurrency.lockutils [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] Acquiring lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.246503] env[62820]: DEBUG oslo_concurrency.lockutils [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] Acquired lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.246759] env[62820]: DEBUG nova.network.neutron [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Refreshing network info cache for port e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1419.251695] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1419.286976] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1419.287359] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1419.287614] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.287896] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1419.288113] env[62820]: DEBUG 
nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.288364] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1419.288644] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1419.288853] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1419.289193] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1419.289419] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1419.289642] env[62820]: DEBUG nova.virt.hardware [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1419.290991] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20946a6-2f3c-4c61-8df0-79d7f5d58661 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.299747] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7af69c2-bedc-484d-a789-300e6c9f60dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.304325] env[62820]: DEBUG nova.scheduler.client.report [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1419.457985] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695378, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073138} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.458344] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.459072] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6243c569-34ed-4f1d-a052-1b969e91900a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.464159] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c0b0fd5a-3400-4ab9-b77c-e841ac09619c tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.258s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.481460] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] aa98dbb0-5ff7-4da5-a365-2b55a8bd2216/aa98dbb0-5ff7-4da5-a365-2b55a8bd2216.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.481769] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ea0b188-10b8-436d-af27-0eeea8e144f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.508657] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1419.508657] env[62820]: value = "task-1695380" [ 1419.508657] env[62820]: _type = "Task" [ 1419.508657] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.516293] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695380, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.545423] env[62820]: DEBUG nova.network.neutron [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1419.631263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.650328] env[62820]: DEBUG nova.network.neutron [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.744699] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695379, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.785842] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1419.786152] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353493', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'name': 'volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '90ea0c16-739a-4132-ac36-e154a846b9c2', 'attached_at': '', 'detached_at': '', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'serial': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1419.787250] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee5481a-3570-4fa3-8fe1-571db814abdc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.807363] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcc872d-6234-43c5-8364-22e614c10042 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.814277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.602s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.815036] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1419.821045] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.567s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.821045] env[62820]: DEBUG nova.objects.instance [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lazy-loading 'resources' on Instance uuid 6176f083-b61a-40d6-90a0-680b628a1e08 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1419.846029] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7/volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.846598] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d9540d2-6ee6-44b0-9fc0-362978a84d26 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.865662] env[62820]: DEBUG oslo_vmware.api [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Waiting for the task: (returnval){ [ 1419.865662] env[62820]: value = "task-1695381" [ 1419.865662] env[62820]: _type = "Task" [ 1419.865662] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.873693] env[62820]: DEBUG oslo_vmware.api [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695381, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.018616] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695380, 'name': ReconfigVM_Task, 'duration_secs': 0.273122} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.018942] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Reconfigured VM instance instance-00000022 to attach disk [datastore1] aa98dbb0-5ff7-4da5-a365-2b55a8bd2216/aa98dbb0-5ff7-4da5-a365-2b55a8bd2216.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.019791] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e50bae53-2811-4602-b8b2-8bdd4651ef74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.026662] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1420.026662] env[62820]: value = "task-1695382" [ 1420.026662] env[62820]: _type = "Task" [ 1420.026662] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.035242] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695382, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.152586] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Releasing lock "refresh_cache-d040f935-566b-4bbe-b9f6-379fd1dc1a91" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.153051] env[62820]: DEBUG nova.compute.manager [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1420.153284] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1420.154329] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23221696-3110-4559-8b8a-ade6a44c1bc6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.162062] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.162316] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-315aa3c9-9494-4b3c-a768-24a6fa6b8df3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.169208] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1420.169208] env[62820]: value = "task-1695383" [ 1420.169208] env[62820]: _type = "Task" [ 1420.169208] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.180121] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.243343] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695379, 'name': CreateVM_Task, 'duration_secs': 0.653346} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.243515] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1420.244224] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.244391] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.244708] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1420.245092] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a2d144f-e842-4123-8813-a57d0540c384 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.249733] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1420.249733] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525d3125-2c92-f93b-b2e7-ba79f7ebcb2e" [ 1420.249733] env[62820]: _type = "Task" [ 1420.249733] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.257573] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525d3125-2c92-f93b-b2e7-ba79f7ebcb2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.315133] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "interface-06fb6034-e010-49bd-9e5e-7699a43dd5a9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.315427] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "interface-06fb6034-e010-49bd-9e5e-7699a43dd5a9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.315770] env[62820]: DEBUG nova.objects.instance [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lazy-loading 'flavor' on Instance uuid 06fb6034-e010-49bd-9e5e-7699a43dd5a9 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1420.320183] env[62820]: DEBUG nova.compute.utils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1420.320746] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1420.320909] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1420.377106] env[62820]: DEBUG oslo_vmware.api [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695381, 'name': ReconfigVM_Task, 'duration_secs': 0.366148} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.380175] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Reconfigured VM instance instance-00000006 to attach disk [datastore1] volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7/volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.384983] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59ba10bf-a9de-47a2-857f-4ed3a1d6bae1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.396362] env[62820]: DEBUG nova.network.neutron [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updated VIF entry in instance network info cache for port e5e97928-d469-42c4-9621-ed449eeebf5c. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1420.396805] env[62820]: DEBUG nova.network.neutron [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.403311] env[62820]: DEBUG oslo_vmware.api [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Waiting for the task: (returnval){ [ 1420.403311] env[62820]: value = "task-1695384" [ 1420.403311] env[62820]: _type = "Task" [ 1420.403311] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.417793] env[62820]: DEBUG oslo_vmware.api [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695384, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.424476] env[62820]: DEBUG nova.policy [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c606501ec683406cb2106e3a1540315e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a069009bc6a741379effea7b50d9e1c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1420.536151] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695382, 'name': Rename_Task, 'duration_secs': 0.157153} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.536556] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1420.536744] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a38d017-bb57-4386-b271-fded1b9ebc66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.544727] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1420.544727] env[62820]: value = "task-1695385" [ 1420.544727] env[62820]: _type = "Task" [ 1420.544727] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.552619] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695385, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.637779] env[62820]: DEBUG nova.network.neutron [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Successfully updated port: 1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1420.683648] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.762121] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525d3125-2c92-f93b-b2e7-ba79f7ebcb2e, 'name': SearchDatastore_Task, 'duration_secs': 0.00919} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.762121] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.762121] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.762121] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.762908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.763234] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.763597] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0c351f0-6d81-4077-a5c9-ea76e9da7f08 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.772741] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.772741] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.773516] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16394562-8e07-4bdb-b222-a6048dc4892b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.779975] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1420.779975] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52de1cb1-3d9b-5836-0378-747805769fbf" [ 1420.779975] env[62820]: _type = "Task" [ 1420.779975] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.791183] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52de1cb1-3d9b-5836-0378-747805769fbf, 'name': SearchDatastore_Task, 'duration_secs': 0.008906} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.791183] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4173d79d-0bc6-4d15-978d-c8c3b3f4dcec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.796399] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1420.796399] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fe76d1-2f74-cce8-1444-df2ddd775266" [ 1420.796399] env[62820]: _type = "Task" [ 1420.796399] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.807014] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fe76d1-2f74-cce8-1444-df2ddd775266, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.819350] env[62820]: DEBUG nova.objects.instance [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lazy-loading 'pci_requests' on Instance uuid 06fb6034-e010-49bd-9e5e-7699a43dd5a9 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1420.826502] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1420.835750] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3cfee2-55ac-4fae-8851-7bd89a103d0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.847489] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073c568b-347f-45a7-935f-3f6059fe9522 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.880101] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b3750b-19f2-46fa-8456-87562eaee786 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.888629] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f170c454-215d-41fc-b1c8-e82071c66f56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.905944] env[62820]: DEBUG oslo_concurrency.lockutils [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] Releasing lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.906227] env[62820]: DEBUG nova.compute.manager [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Received event network-changed-5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1420.906406] env[62820]: DEBUG nova.compute.manager [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Refreshing instance network info cache due to event network-changed-5af0a5c5-a176-477e-b59a-fa82e9eea9a7. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1420.906973] env[62820]: DEBUG oslo_concurrency.lockutils [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] Acquiring lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.906973] env[62820]: DEBUG oslo_concurrency.lockutils [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] Acquired lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.906973] env[62820]: DEBUG nova.network.neutron [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Refreshing network info cache for port 5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1420.908569] env[62820]: DEBUG nova.compute.provider_tree [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.917758] env[62820]: DEBUG oslo_vmware.api [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695384, 'name': ReconfigVM_Task, 'duration_secs': 0.162348} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.918945] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353493', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'name': 'volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '90ea0c16-739a-4132-ac36-e154a846b9c2', 'attached_at': '', 'detached_at': '', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'serial': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1420.985915] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Successfully created port: 0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1421.053704] env[62820]: DEBUG oslo_vmware.api [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695385, 'name': PowerOnVM_Task, 'duration_secs': 0.496709} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.053987] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1421.054206] env[62820]: INFO nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1421.054386] env[62820]: DEBUG nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1421.055169] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8772cf21-3b50-4299-8fee-90370041d91a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.141888] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.142067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquired lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.142694] env[62820]: DEBUG nova.network.neutron [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1421.180032] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695383, 'name': PowerOffVM_Task, 'duration_secs': 0.860329} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.180326] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1421.180500] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1421.180759] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59bbea16-112b-43af-9bd2-e28107f9f751 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.204767] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1421.204913] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1421.205114] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Deleting the datastore file [datastore1] d040f935-566b-4bbe-b9f6-379fd1dc1a91 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1421.205379] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3243da49-c630-4b72-b1e8-8c64b13ab6c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.212476] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for the task: (returnval){ [ 1421.212476] env[62820]: value = "task-1695387" [ 1421.212476] env[62820]: _type = "Task" [ 1421.212476] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.220641] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695387, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.308730] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fe76d1-2f74-cce8-1444-df2ddd775266, 'name': SearchDatastore_Task, 'duration_secs': 0.01558} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.309038] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.309337] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1421.309615] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c506a7b7-4681-4e4b-bc76-0aa585e4c4ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.317264] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1421.317264] env[62820]: value = "task-1695388" [ 1421.317264] env[62820]: _type = "Task" [ 1421.317264] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.321969] env[62820]: DEBUG nova.objects.base [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Object Instance<06fb6034-e010-49bd-9e5e-7699a43dd5a9> lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1421.322172] env[62820]: DEBUG nova.network.neutron [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1421.329888] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Successfully created port: 1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1421.337834] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695388, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.417020] env[62820]: DEBUG nova.scheduler.client.report [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1421.419963] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ecc78454-d7ee-474b-a550-474cac2ac171 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "interface-06fb6034-e010-49bd-9e5e-7699a43dd5a9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.104s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.553499] env[62820]: DEBUG nova.compute.manager [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Received event network-vif-plugged-1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1421.553873] env[62820]: DEBUG oslo_concurrency.lockutils [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] Acquiring lock "31639194-b0c4-4eb9-a6f4-e61b067c807f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1421.554140] env[62820]: DEBUG oslo_concurrency.lockutils [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.554442] env[62820]: DEBUG oslo_concurrency.lockutils [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.554620] env[62820]: DEBUG nova.compute.manager [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] No waiting events found dispatching network-vif-plugged-1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1421.554786] env[62820]: WARNING nova.compute.manager [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Received unexpected event network-vif-plugged-1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 for instance with vm_state building and task_state spawning. [ 1421.554948] env[62820]: DEBUG nova.compute.manager [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Received event network-changed-1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1421.555117] env[62820]: DEBUG nova.compute.manager [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Refreshing instance network info cache due to event network-changed-1da745d0-cf16-4aea-b62a-dcdc1c42e0c8. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1421.555282] env[62820]: DEBUG oslo_concurrency.lockutils [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] Acquiring lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.572558] env[62820]: INFO nova.compute.manager [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Took 37.02 seconds to build instance. [ 1421.700939] env[62820]: DEBUG nova.network.neutron [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1421.726328] env[62820]: DEBUG oslo_vmware.api [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Task: {'id': task-1695387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299768} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.726328] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1421.726328] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1421.726328] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1421.726328] env[62820]: INFO nova.compute.manager [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Took 1.57 seconds to destroy the instance on the hypervisor. [ 1421.728549] env[62820]: DEBUG oslo.service.loopingcall [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1421.731078] env[62820]: DEBUG nova.compute.manager [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1421.731186] env[62820]: DEBUG nova.network.neutron [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1421.765123] env[62820]: DEBUG nova.network.neutron [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1421.829967] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "2f917745-28ef-4dfe-8c09-45c15a80145d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.830699] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.830923] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "2f917745-28ef-4dfe-8c09-45c15a80145d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.831132] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.831375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.833042] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695388, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.833521] env[62820]: INFO nova.compute.manager [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Terminating instance [ 1421.839509] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1421.869395] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1421.869722] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1421.869885] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1421.870377] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1421.870918] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1421.871265] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1421.871573] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1421.871796] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1421.872107] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc 
tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1421.872447] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1421.872637] env[62820]: DEBUG nova.virt.hardware [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1421.873570] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8505c39-693f-4e64-8eb2-98c194d1e7a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.882977] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c064b517-4ca2-4aff-9f18-1e36651e7784 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.902438] env[62820]: DEBUG nova.network.neutron [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updated VIF entry in instance network info cache for port 5af0a5c5-a176-477e-b59a-fa82e9eea9a7. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1421.902961] env[62820]: DEBUG nova.network.neutron [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.920918] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.928652] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.943s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.929386] env[62820]: INFO nova.compute.claims [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1421.949617] env[62820]: INFO nova.scheduler.client.report [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted allocations for instance 6176f083-b61a-40d6-90a0-680b628a1e08 [ 1421.968313] env[62820]: DEBUG nova.network.neutron [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Updating instance_info_cache with network_info: [{"id": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "address": "fa:16:3e:36:34:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1da745d0-cf", "ovs_interfaceid": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.971868] env[62820]: DEBUG nova.objects.instance [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lazy-loading 'flavor' on Instance uuid 90ea0c16-739a-4132-ac36-e154a846b9c2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1422.074914] env[62820]: DEBUG oslo_concurrency.lockutils [None req-70f4bf36-54b5-4622-aba1-6bc77039bb7c tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.502s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.268941] env[62820]: DEBUG nova.network.neutron [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.330183] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695388, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.75774} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.331533] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.331533] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.331533] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a42e970e-57a9-4889-8da6-c16bbf41ac27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.338655] env[62820]: DEBUG nova.compute.manager [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1422.338897] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1422.340022] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1422.340022] env[62820]: value = "task-1695389" [ 1422.340022] env[62820]: _type = "Task" [ 1422.340022] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.340022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b667bdc1-6f7d-4c0f-b0ad-ee6f8108f023 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.352609] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1422.355513] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9996683c-a60b-46ef-92bb-30d62fb654d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.357205] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695389, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.362277] env[62820]: DEBUG oslo_vmware.api [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1422.362277] env[62820]: value = "task-1695390" [ 1422.362277] env[62820]: _type = "Task" [ 1422.362277] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.371020] env[62820]: DEBUG oslo_vmware.api [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695390, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.406861] env[62820]: DEBUG oslo_concurrency.lockutils [req-afb55f7c-fcb3-4acd-b406-920c82bda642 req-1d587f05-29ad-44b4-94b4-fdc76c225541 service nova] Releasing lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.463767] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dceeef5a-d340-41a1-9be9-f19b727aa3a9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "6176f083-b61a-40d6-90a0-680b628a1e08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.204s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.478708] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Releasing lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.479019] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Instance network_info: |[{"id": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "address": "fa:16:3e:36:34:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1da745d0-cf", "ovs_interfaceid": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1422.479320] env[62820]: DEBUG oslo_concurrency.lockutils [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] Acquired lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.479502] env[62820]: DEBUG nova.network.neutron [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Refreshing network info cache for port 1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.480573] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None 
req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:34:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1da745d0-cf16-4aea-b62a-dcdc1c42e0c8', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1422.493429] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Creating folder: Project (2c558a3424c74ab2bface054c43cf8b9). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1422.494039] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d06747cc-a092-4dc3-9e43-7f70ed284226 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.344s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.498602] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dee8bc85-4ffd-43ba-a562-9ae780196dcc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.510436] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Created folder: Project (2c558a3424c74ab2bface054c43cf8b9) in parent group-v353379. [ 1422.510991] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Creating folder: Instances. Parent ref: group-v353498. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1422.510991] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d52fb470-065d-488b-9233-de69901d009c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.519942] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Created folder: Instances in parent group-v353498. [ 1422.520200] env[62820]: DEBUG oslo.service.loopingcall [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1422.520388] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1422.520590] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce225904-0901-468b-a047-8887f8de1bc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.540524] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1422.540524] env[62820]: value = "task-1695393" [ 1422.540524] env[62820]: _type = "Task" [ 1422.540524] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.550973] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695393, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.771933] env[62820]: INFO nova.compute.manager [-] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Took 1.04 seconds to deallocate network for instance. [ 1422.836339] env[62820]: DEBUG nova.network.neutron [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Updated VIF entry in instance network info cache for port 1da745d0-cf16-4aea-b62a-dcdc1c42e0c8. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.836339] env[62820]: DEBUG nova.network.neutron [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Updating instance_info_cache with network_info: [{"id": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "address": "fa:16:3e:36:34:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1da745d0-cf", "ovs_interfaceid": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.858335] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070348} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.858335] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1422.858335] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82318c5f-3527-4729-a7aa-367376008910 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.882695] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1422.888841] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6e4ebaa-b1f0-4268-8820-f0002d1589a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.910299] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "b6c58867-914e-4e6e-8092-fc8991dc87f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.910299] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.916412] env[62820]: DEBUG oslo_vmware.api [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695390, 'name': PowerOffVM_Task, 'duration_secs': 0.487024} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.918188] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1422.918554] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1422.919027] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1422.919027] env[62820]: value = "task-1695394" [ 1422.919027] env[62820]: _type = "Task" [ 1422.919027] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.919449] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53edbf5e-8d2b-4775-8fd7-e6e2ff22d649 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.931119] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695394, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.001022] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1423.001022] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1423.001022] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Deleting the datastore file [datastore1] 2f917745-28ef-4dfe-8c09-45c15a80145d {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1423.001022] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38d6694d-8454-410d-8f1f-bdd359cced27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.009212] env[62820]: DEBUG oslo_vmware.api [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1423.009212] env[62820]: value = "task-1695396" [ 1423.009212] env[62820]: _type = "Task" [ 1423.009212] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.023419] env[62820]: DEBUG oslo_vmware.api [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695396, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.064363] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695393, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.286450] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.341156] env[62820]: DEBUG oslo_concurrency.lockutils [req-8e85e611-00ce-495a-9d06-f8b3535b1fa8 req-b9704d42-e92a-4d27-b506-8e806b2c2298 service nova] Releasing lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.415797] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1423.432607] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695394, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.453061] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f591cf-0747-4b9d-8ae7-4afa9d2e82ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.463229] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879ec2f9-daf0-415d-90de-6d7d66261be8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.498734] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30666526-53ce-4335-a5b4-edbc3ef161b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.508778] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc9d53a-ac9e-4278-a5a9-964d0344f54c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.524984] env[62820]: DEBUG nova.compute.provider_tree [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.530454] env[62820]: DEBUG oslo_vmware.api [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.497024} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.530978] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.531191] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1423.531374] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1423.531546] env[62820]: INFO nova.compute.manager [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1423.531785] env[62820]: DEBUG oslo.service.loopingcall [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.532277] env[62820]: DEBUG nova.compute.manager [-] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1423.532381] env[62820]: DEBUG nova.network.neutron [-] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1423.552988] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695393, 'name': CreateVM_Task, 'duration_secs': 0.529741} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.553175] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1423.553938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.554179] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.555139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1423.555139] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17302a41-cb55-432d-a219-1e288a4f97e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.560984] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1423.560984] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529a3c90-06d6-dae7-0b8e-40aa493dc0a3" [ 1423.560984] env[62820]: _type = "Task" [ 1423.560984] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.571139] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529a3c90-06d6-dae7-0b8e-40aa493dc0a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.573297] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Successfully updated port: 0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1423.780139] env[62820]: DEBUG nova.compute.manager [req-227952c7-6457-457f-ac00-c69619242668 req-da089b00-69ca-4193-9dc4-680010a8f358 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received event network-vif-plugged-0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1423.780397] env[62820]: DEBUG oslo_concurrency.lockutils [req-227952c7-6457-457f-ac00-c69619242668 req-da089b00-69ca-4193-9dc4-680010a8f358 service nova] Acquiring lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.780914] env[62820]: DEBUG oslo_concurrency.lockutils [req-227952c7-6457-457f-ac00-c69619242668 req-da089b00-69ca-4193-9dc4-680010a8f358 service nova] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.781030] env[62820]: DEBUG oslo_concurrency.lockutils [req-227952c7-6457-457f-ac00-c69619242668 req-da089b00-69ca-4193-9dc4-680010a8f358 service nova] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.781216] env[62820]: DEBUG nova.compute.manager [req-227952c7-6457-457f-ac00-c69619242668 req-da089b00-69ca-4193-9dc4-680010a8f358 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] No waiting events found dispatching network-vif-plugged-0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1423.781405] env[62820]: WARNING nova.compute.manager [req-227952c7-6457-457f-ac00-c69619242668 req-da089b00-69ca-4193-9dc4-680010a8f358 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received unexpected event network-vif-plugged-0b1a477f-8c32-4c99-892f-23f8332338e2 for instance with vm_state building and task_state spawning. [ 1423.945039] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695394, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.954734] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.034233] env[62820]: DEBUG nova.scheduler.client.report [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1424.076044] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529a3c90-06d6-dae7-0b8e-40aa493dc0a3, 'name': SearchDatastore_Task, 'duration_secs': 0.016651} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.076392] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.076632] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1424.076906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.079190] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.079468] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.080328] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54fa3a4d-2d4d-4505-a6ba-108b3cb51212 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.091874] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.092270] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1424.093428] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9575f183-2bbf-46d3-b0f1-231ae10fc6dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.100220] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1424.100220] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52092902-0236-9f99-e609-0eb03b26dcb0" [ 1424.100220] env[62820]: _type = "Task" [ 1424.100220] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.109957] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52092902-0236-9f99-e609-0eb03b26dcb0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.184443] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.186376] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.186376] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.186376] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.186376] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.188450] env[62820]: INFO nova.compute.manager [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Terminating instance [ 1424.444703] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695394, 'name': ReconfigVM_Task, 'duration_secs': 1.029463} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.448161] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfigured VM instance instance-00000023 to attach disk [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1424.449729] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a8da7e1-d9eb-41e3-b0fb-79ee30f56146 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.457666] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1424.457666] env[62820]: value = "task-1695397" [ 1424.457666] env[62820]: _type = "Task" [ 1424.457666] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.474091] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695397, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.544602] env[62820]: DEBUG nova.compute.manager [req-47a768df-67c6-4d6b-b18a-851885ee7832 req-5da4d41d-117b-4454-ac37-1cca0f40e70e service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Received event network-vif-deleted-0e52122a-94ee-4e33-92b4-777d631cef4b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1424.544813] env[62820]: INFO nova.compute.manager [req-47a768df-67c6-4d6b-b18a-851885ee7832 req-5da4d41d-117b-4454-ac37-1cca0f40e70e service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Neutron deleted interface 0e52122a-94ee-4e33-92b4-777d631cef4b; detaching it from the instance and deleting it from the info cache [ 1424.544985] env[62820]: DEBUG nova.network.neutron [req-47a768df-67c6-4d6b-b18a-851885ee7832 req-5da4d41d-117b-4454-ac37-1cca0f40e70e service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.549407] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.549877] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1424.556039] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.521s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.556039] env[62820]: INFO nova.compute.claims [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1424.619064] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52092902-0236-9f99-e609-0eb03b26dcb0, 'name': SearchDatastore_Task, 'duration_secs': 0.022025} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.619884] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d07013d-5e03-4a60-9557-594a94c46073 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.626392] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1424.626392] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527eb10c-89a8-a9a9-6fae-c7d42e71393d" [ 1424.626392] env[62820]: _type = "Task" [ 1424.626392] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.636366] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527eb10c-89a8-a9a9-6fae-c7d42e71393d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.694116] env[62820]: DEBUG nova.compute.manager [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1424.694116] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1424.694116] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f210ca9f-5d4c-4383-b1f4-9b795ffa9ac5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.704355] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1424.704810] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5727de4-dbfb-4161-8d83-7459e9a6d536 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.715370] env[62820]: DEBUG oslo_vmware.api [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1424.715370] env[62820]: value = "task-1695398" [ 1424.715370] env[62820]: _type = "Task" [ 1424.715370] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.725055] env[62820]: DEBUG oslo_vmware.api [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695398, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.736344] env[62820]: DEBUG nova.network.neutron [-] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.920622] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "498236b7-3688-4ab1-a604-a9737ba058e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.920622] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.953016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.953327] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.971709] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695397, 'name': Rename_Task, 'duration_secs': 0.301212} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.972215] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1424.972828] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c9ad00e-ad98-47c5-a46c-c835cb0a6bd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.980341] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1424.980341] env[62820]: value = "task-1695399" [ 1424.980341] env[62820]: _type = "Task" [ 1424.980341] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.990012] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.054599] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e812fbbb-14b3-42eb-9053-ed5092a3c93a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.059711] env[62820]: DEBUG nova.compute.utils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1425.064047] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1425.064293] env[62820]: DEBUG nova.network.neutron [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1425.069020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776149cf-5278-47c6-a9bd-623a57ff2cdb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.110988] env[62820]: DEBUG nova.compute.manager [req-47a768df-67c6-4d6b-b18a-851885ee7832 req-5da4d41d-117b-4454-ac37-1cca0f40e70e service nova] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Detach interface failed, port_id=0e52122a-94ee-4e33-92b4-777d631cef4b, reason: Instance 2f917745-28ef-4dfe-8c09-45c15a80145d could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1425.136828] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527eb10c-89a8-a9a9-6fae-c7d42e71393d, 'name': SearchDatastore_Task, 'duration_secs': 0.012295} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.137296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.140017] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 31639194-b0c4-4eb9-a6f4-e61b067c807f/31639194-b0c4-4eb9-a6f4-e61b067c807f.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1425.140017] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-762b2cea-a6fd-42c0-a97c-cfa358ce4984 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.144960] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1425.144960] env[62820]: value = "task-1695400" [ 1425.144960] env[62820]: _type = "Task" [ 1425.144960] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.154952] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.231064] env[62820]: DEBUG oslo_vmware.api [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695398, 'name': PowerOffVM_Task, 'duration_secs': 0.411849} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.231415] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.231733] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.231924] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd7b962d-a99e-4bed-a7df-3645bd46ff2e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.244458] env[62820]: INFO nova.compute.manager [-] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Took 1.71 seconds to deallocate network for instance. [ 1425.273906] env[62820]: DEBUG nova.policy [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf77578635f74b52970b2d7580c1bfd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b9015dc7894a1d98bf0bb73bdf7636', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1425.322250] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.322385] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.322586] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Deleting the datastore file [datastore1] 06fb6034-e010-49bd-9e5e-7699a43dd5a9 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.322749] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3970ac34-d753-4787-af15-9d86d9f0b770 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.333127] env[62820]: DEBUG oslo_vmware.api [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 
tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for the task: (returnval){ [ 1425.333127] env[62820]: value = "task-1695402" [ 1425.333127] env[62820]: _type = "Task" [ 1425.333127] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.341455] env[62820]: DEBUG oslo_vmware.api [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.423168] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1425.460423] env[62820]: INFO nova.compute.manager [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Detaching volume deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7 [ 1425.496485] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695399, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.515830] env[62820]: INFO nova.virt.block_device [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Attempting to driver detach volume deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7 from mountpoint /dev/sdb [ 1425.515890] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1425.516194] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353493', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'name': 'volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '90ea0c16-739a-4132-ac36-e154a846b9c2', 'attached_at': '', 'detached_at': '', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'serial': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1425.516979] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15c9cac-302c-4868-9c6d-6d40c20919e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.545355] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7379794a-e841-4bb0-9f09-7d7d12bc2fd4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.558352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b11063-97bd-4e86-a9a8-a52db212ecc8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.583354] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1425.590547] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b8e198-cee0-4437-bded-49b9133ad6cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.610719] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] The volume has not been displaced from its original location: [datastore1] volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7/volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1425.616909] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Reconfiguring VM instance instance-00000006 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1425.620658] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d43061ad-4cb8-47a3-83b9-6654f28ed00c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.641365] env[62820]: DEBUG oslo_vmware.api [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Waiting for the task: (returnval){ [ 1425.641365] env[62820]: value = "task-1695403" [ 1425.641365] env[62820]: _type = "Task" [ 1425.641365] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.658953] env[62820]: DEBUG oslo_vmware.api [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.663205] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695400, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.706870] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107f6dd2-1bbe-4f75-9de2-3cd0c368d3e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.711246] env[62820]: DEBUG nova.network.neutron [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Successfully created port: c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1425.719180] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22622a45-d9be-47d2-9bad-674d15ec5cef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.749047] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d85d0a9-05fa-4f28-8701-1e3052525e1b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.752445] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.757804] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e7b8a1-a666-4788-847d-ebcf9ccd49e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.774038] env[62820]: DEBUG nova.compute.provider_tree [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1425.843257] env[62820]: DEBUG oslo_vmware.api [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Task: {'id': task-1695402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466084} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.844532] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.844532] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1425.844936] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1425.845935] env[62820]: INFO nova.compute.manager [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1425.845935] env[62820]: DEBUG oslo.service.loopingcall [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.845935] env[62820]: DEBUG nova.compute.manager [-] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1425.845935] env[62820]: DEBUG nova.network.neutron [-] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.871215] env[62820]: DEBUG nova.compute.manager [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received event network-changed-0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1425.871407] env[62820]: DEBUG nova.compute.manager [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Refreshing instance network info cache due to event network-changed-0b1a477f-8c32-4c99-892f-23f8332338e2. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1425.871620] env[62820]: DEBUG oslo_concurrency.lockutils [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] Acquiring lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.871755] env[62820]: DEBUG oslo_concurrency.lockutils [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] Acquired lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.872401] env[62820]: DEBUG nova.network.neutron [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Refreshing network info cache for port 0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1425.946788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.991696] env[62820]: DEBUG oslo_vmware.api [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695399, 'name': PowerOnVM_Task, 'duration_secs': 0.704395} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.992103] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1425.992357] env[62820]: INFO nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Took 9.47 seconds to spawn the instance on the hypervisor. [ 1425.992556] env[62820]: DEBUG nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1425.993359] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9270bb3c-f4ec-4c4b-be83-9145375f30e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.153053] env[62820]: DEBUG oslo_vmware.api [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695403, 'name': ReconfigVM_Task, 'duration_secs': 0.420778} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.153687] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Reconfigured VM instance instance-00000006 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1426.163252] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e61813fb-4043-45fb-9251-f8ef27d8999d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.173127] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586053} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.173710] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 31639194-b0c4-4eb9-a6f4-e61b067c807f/31639194-b0c4-4eb9-a6f4-e61b067c807f.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1426.173918] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1426.174565] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6aec9c6e-75c3-485e-9e43-09b2e3f09fd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.180352] env[62820]: DEBUG oslo_vmware.api [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Waiting for the task: (returnval){ [ 1426.180352] env[62820]: value = "task-1695404" [ 1426.180352] env[62820]: _type = "Task" [ 1426.180352] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.182348] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1426.182348] env[62820]: value = "task-1695405" [ 1426.182348] env[62820]: _type = "Task" [ 1426.182348] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.197269] env[62820]: DEBUG oslo_vmware.api [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695404, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.200314] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.298056] env[62820]: ERROR nova.scheduler.client.report [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [req-19e1e5f0-c599-4a33-a385-e901fd2725bf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-19e1e5f0-c599-4a33-a385-e901fd2725bf"}]} [ 1426.324775] env[62820]: DEBUG nova.scheduler.client.report [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1426.365231] env[62820]: DEBUG nova.scheduler.client.report [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1426.368088] env[62820]: DEBUG nova.compute.provider_tree [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1426.383147] env[62820]: DEBUG nova.scheduler.client.report [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1426.405651] env[62820]: DEBUG nova.scheduler.client.report [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1426.456174] env[62820]: DEBUG nova.network.neutron [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1426.519400] env[62820]: INFO nova.compute.manager [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Took 40.91 seconds to build instance. [ 1426.594369] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1426.626326] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1426.626696] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1426.626778] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1426.627296] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1426.627296] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1426.627296] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1426.627476] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1426.627640] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1426.630408] env[62820]: DEBUG nova.virt.hardware [None 
req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1426.630655] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1426.630861] env[62820]: DEBUG nova.virt.hardware [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1426.631735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4f7bff-9bed-44d8-8620-c5615452f097 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.647227] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b552a4e5-ca38-449b-b1f1-be00803b86a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.695675] env[62820]: DEBUG oslo_vmware.api [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Task: {'id': task-1695404, 'name': ReconfigVM_Task, 'duration_secs': 0.309911} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.696397] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353493', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'name': 'volume-deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '90ea0c16-739a-4132-ac36-e154a846b9c2', 'attached_at': '', 'detached_at': '', 'volume_id': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7', 'serial': 'deb3cd23-8fb9-4f5c-9a28-3f93c0d069a7'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1426.701741] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.302463} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.702017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1426.704485] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb5dede-0aa5-4781-9399-30a31487032b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.729209] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 31639194-b0c4-4eb9-a6f4-e61b067c807f/31639194-b0c4-4eb9-a6f4-e61b067c807f.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1426.731916] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91e83df4-0764-4efa-88bf-65c8e2a74689 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.753822] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1426.753822] env[62820]: value = "task-1695406" [ 1426.753822] env[62820]: _type = "Task" [ 1426.753822] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.758095] env[62820]: DEBUG nova.network.neutron [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.764181] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695406, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.822226] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Successfully updated port: 1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1426.932265] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabccd84-4cfc-485c-b7ca-c3ab1ddd42f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.940385] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9053da3d-5f6a-4b27-996a-2396c3ada4f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.974681] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2a181c-840f-45ad-9ac4-cd8e9aeb8330 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.982813] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edce03b-f4ff-418a-8fb8-9e253f2d1377 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.996970] env[62820]: DEBUG nova.compute.provider_tree [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1427.021228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4812d644-7756-4202-8a56-4f7bda3dc471 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.798s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.260850] env[62820]: DEBUG oslo_concurrency.lockutils [req-d4dc5620-9c85-4118-8dc3-36d1bdcd03f3 req-f39dba30-8a52-42b4-a4e5-599b7c6970a7 service nova] Releasing lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.266233] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695406, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.267804] env[62820]: DEBUG nova.objects.instance [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lazy-loading 'flavor' on Instance uuid 90ea0c16-739a-4132-ac36-e154a846b9c2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.318428] env[62820]: INFO nova.compute.manager [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Rebuilding instance [ 1427.325962] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.327976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.327976] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1427.332327] env[62820]: DEBUG nova.network.neutron [-] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.367459] env[62820]: DEBUG nova.compute.manager [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1427.368586] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5469b1-f805-4c2d-bfa3-9ac36e1e7b71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.381919] env[62820]: DEBUG nova.compute.manager [req-067ebce8-e71d-46fb-b587-6813f6fc7308 req-2de81e50-3681-404f-8fd0-012a5829269b service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Received event network-vif-plugged-c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1427.382167] env[62820]: DEBUG oslo_concurrency.lockutils [req-067ebce8-e71d-46fb-b587-6813f6fc7308 req-2de81e50-3681-404f-8fd0-012a5829269b service nova] Acquiring lock "706d42cd-53d9-4976-bc67-98816a40fff4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.382528] env[62820]: DEBUG oslo_concurrency.lockutils 
[req-067ebce8-e71d-46fb-b587-6813f6fc7308 req-2de81e50-3681-404f-8fd0-012a5829269b service nova] Lock "706d42cd-53d9-4976-bc67-98816a40fff4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.383120] env[62820]: DEBUG oslo_concurrency.lockutils [req-067ebce8-e71d-46fb-b587-6813f6fc7308 req-2de81e50-3681-404f-8fd0-012a5829269b service nova] Lock "706d42cd-53d9-4976-bc67-98816a40fff4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.383120] env[62820]: DEBUG nova.compute.manager [req-067ebce8-e71d-46fb-b587-6813f6fc7308 req-2de81e50-3681-404f-8fd0-012a5829269b service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] No waiting events found dispatching network-vif-plugged-c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1427.383280] env[62820]: WARNING nova.compute.manager [req-067ebce8-e71d-46fb-b587-6813f6fc7308 req-2de81e50-3681-404f-8fd0-012a5829269b service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Received unexpected event network-vif-plugged-c83c67d0-648f-4a10-b8a2-7e83e079d0f3 for instance with vm_state building and task_state spawning. [ 1427.539191] env[62820]: DEBUG nova.scheduler.client.report [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 58 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1427.540942] env[62820]: DEBUG nova.compute.provider_tree [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 58 to 59 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1427.540942] env[62820]: DEBUG nova.compute.provider_tree [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1427.558542] env[62820]: DEBUG nova.network.neutron [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 
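The "706d42cd-...-events" lock lines above record Nova popping an expected instance event for network-vif-plugged and, finding no waiter, logging the "Received unexpected event ... vm_state building" warning. As an illustrative annotation (a simplified stand-in, not nova.compute.manager.InstanceEvents), the pattern looks roughly like this:

import threading
from collections import defaultdict

# Per-instance registry of expected events; popping an event either wakes a
# waiter or is reported as unexpected ("No waiting events found dispatching ...").
class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._waiters[instance_uuid][event_name] = ev
            return ev                        # the spawning thread waits on this

    def pop(self, instance_uuid, event_name):
        with self._lock:                     # corresponds to the "-events" lock
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"No waiting events found dispatching {event_name}")
            return False
        ev.set()                             # wake the waiter
        return True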
tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Successfully updated port: c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1427.766710] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695406, 'name': ReconfigVM_Task, 'duration_secs': 0.529055} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.766710] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 31639194-b0c4-4eb9-a6f4-e61b067c807f/31639194-b0c4-4eb9-a6f4-e61b067c807f.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.767409] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-713fe6c0-8da4-4ae6-9cd6-64eb9e3fd243 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.774806] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1427.774806] env[62820]: value = "task-1695410" [ 1427.774806] env[62820]: _type = "Task" [ 1427.774806] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.784654] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695410, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.837337] env[62820]: INFO nova.compute.manager [-] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Took 1.99 seconds to deallocate network for instance. [ 1427.865219] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1428.048250] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.495s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.048792] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1428.052639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.173s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.052885] env[62820]: DEBUG nova.objects.instance [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lazy-loading 'resources' on Instance uuid cc2b0ed5-b711-487d-8bfc-ee2745c9ef89 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1428.066621] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.066794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.067703] env[62820]: DEBUG nova.network.neutron [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1428.122833] env[62820]: DEBUG nova.compute.manager [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received event network-vif-plugged-1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1428.122833] env[62820]: DEBUG oslo_concurrency.lockutils [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] Acquiring lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.122833] env[62820]: DEBUG oslo_concurrency.lockutils [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.122833] env[62820]: DEBUG oslo_concurrency.lockutils [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner 
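The oslo_concurrency.lockutils lines throughout this section report how long each caller waited for and then held a named lock (e.g. "compute_resources ... waited 26.173s", "held 3.495s"). A hypothetical, simplified sketch of that bookkeeping (not the library's actual synchronized helper):

import time
import threading
from contextlib import contextmanager

# Wraps a lock and reports wait/hold durations in the style of the
# "acquired ... waited Ns" / '"released" ... held Ns' log lines above.
@contextmanager
def timed_lock(lock: threading.Lock, name: str, caller: str):
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')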
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.122833] env[62820]: DEBUG nova.compute.manager [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] No waiting events found dispatching network-vif-plugged-1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1428.122833] env[62820]: WARNING nova.compute.manager [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received unexpected event network-vif-plugged-1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f for instance with vm_state building and task_state spawning. [ 1428.122833] env[62820]: DEBUG nova.compute.manager [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received event network-changed-1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1428.123640] env[62820]: DEBUG nova.compute.manager [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Refreshing instance network info cache due to event network-changed-1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1428.123952] env[62820]: DEBUG oslo_concurrency.lockutils [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] Acquiring lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.207083] env[62820]: DEBUG nova.network.neutron [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Updating instance_info_cache with network_info: [{"id": "0b1a477f-8c32-4c99-892f-23f8332338e2", "address": "fa:16:3e:19:8c:b7", "network": {"id": "fa8ad356-4923-4df8-91fb-4a94668b2e08", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-157646262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b1a477f-8c", "ovs_interfaceid": "0b1a477f-8c32-4c99-892f-23f8332338e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f", "address": "fa:16:3e:40:d0:62", "network": {"id": "d148355a-8625-4ad0-b1e2-d51c0b4345a8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-574156154", 
"subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a88c41e-8f", "ovs_interfaceid": "1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.280738] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcb3ea25-0f12-4fca-b835-6c833157ee10 tempest-VolumesAssistedSnapshotsTest-526334994 tempest-VolumesAssistedSnapshotsTest-526334994-project-admin] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.327s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.288036] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695410, 'name': Rename_Task, 'duration_secs': 0.157479} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.288036] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1428.288629] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b90e0277-7fe2-4bff-9d3d-4f271124c442 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.294885] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1428.294885] env[62820]: value = "task-1695412" [ 1428.294885] env[62820]: _type = "Task" [ 1428.294885] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.303694] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695412, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.346385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.384038] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.384930] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84f7ef72-331e-4e7d-9bcd-5a74af4a6bea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.396988] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1428.396988] env[62820]: value = "task-1695413" [ 1428.396988] env[62820]: _type = "Task" [ 1428.396988] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.406405] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695413, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.554652] env[62820]: DEBUG nova.compute.utils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1428.559935] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1428.560148] env[62820]: DEBUG nova.network.neutron [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1428.613466] env[62820]: DEBUG nova.policy [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b328ffc83d344899fcbbb6e9ade1698', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bdc42fe98fb43d7bd92e2dd789aff93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1428.614682] env[62820]: DEBUG nova.network.neutron [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1428.710731] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Releasing lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.711187] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Instance network_info: |[{"id": "0b1a477f-8c32-4c99-892f-23f8332338e2", "address": "fa:16:3e:19:8c:b7", "network": {"id": "fa8ad356-4923-4df8-91fb-4a94668b2e08", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-157646262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b1a477f-8c", "ovs_interfaceid": "0b1a477f-8c32-4c99-892f-23f8332338e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f", "address": "fa:16:3e:40:d0:62", "network": {"id": "d148355a-8625-4ad0-b1e2-d51c0b4345a8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-574156154", "subnets": [{"cidr": 
"192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a88c41e-8f", "ovs_interfaceid": "1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1428.711447] env[62820]: DEBUG oslo_concurrency.lockutils [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] Acquired lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.711628] env[62820]: DEBUG nova.network.neutron [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Refreshing network info cache for port 1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1428.712787] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:8c:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b1a477f-8c32-4c99-892f-23f8332338e2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:d0:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a06a63d6-2aeb-4084-8022-f804cac3fa74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1428.727243] env[62820]: DEBUG oslo.service.loopingcall [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1428.730939] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1428.731868] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d5f26cc-381a-42d3-8496-f16635995689 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.760679] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1428.760679] env[62820]: value = "task-1695414" [ 1428.760679] env[62820]: _type = "Task" [ 1428.760679] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.771233] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695414, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.810137] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695412, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.842599] env[62820]: DEBUG nova.network.neutron [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Updating instance_info_cache with network_info: [{"id": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "address": "fa:16:3e:5e:98:88", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83c67d0-64", "ovs_interfaceid": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.908024] env[62820]: DEBUG nova.compute.manager [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1428.919481] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 
tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695413, 'name': PowerOffVM_Task, 'duration_secs': 0.347336} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.922485] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1428.923240] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.923776] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2261e718-0ed5-4ec4-baec-372315b87fa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.930920] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1428.930920] env[62820]: value = "task-1695415" [ 1428.930920] env[62820]: _type = "Task" [ 1428.930920] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.940065] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.006126] env[62820]: DEBUG nova.network.neutron [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Successfully created port: 7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1429.062966] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1429.069201] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c674d30-df9a-4a9e-b8b1-5ba34d3299a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.077163] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd033842-6a67-49da-869b-a4621020f4a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.113567] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d313686-0ce5-48b4-bfde-e667a216d246 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.121511] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ddcf8b-fb92-425d-a021-1b72376217fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.136043] env[62820]: DEBUG nova.compute.provider_tree [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1429.275462] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695414, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.307611] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695412, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.347613] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.347965] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Instance network_info: |[{"id": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "address": "fa:16:3e:5e:98:88", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83c67d0-64", "ovs_interfaceid": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1429.348429] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:98:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c83c67d0-648f-4a10-b8a2-7e83e079d0f3', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.356302] env[62820]: DEBUG oslo.service.loopingcall [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.356302] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1429.356429] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0cf7259-f592-41ce-b190-52bd705f454b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.379553] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.379553] env[62820]: value = "task-1695416" [ 1429.379553] env[62820]: _type = "Task" [ 1429.379553] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.387282] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695416, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.429190] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.446453] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1429.448154] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1429.448154] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353396', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'name': 'volume-763afde5-c692-44d0-a083-7f09ae379a22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e4668ed-801a-4105-8b9e-cf37be91c8b8', 'attached_at': '', 'detached_at': '', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'serial': '763afde5-c692-44d0-a083-7f09ae379a22'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1429.448154] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88d44bc-769a-450f-9b99-39edd59f2199 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.468374] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e048fee-3fde-45c3-896f-14f58cea0219 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.472568] env[62820]: DEBUG nova.compute.manager [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Received event network-changed-c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1429.472782] env[62820]: DEBUG nova.compute.manager [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Refreshing instance network info cache due to event network-changed-c83c67d0-648f-4a10-b8a2-7e83e079d0f3. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1429.473017] env[62820]: DEBUG oslo_concurrency.lockutils [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] Acquiring lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.473195] env[62820]: DEBUG oslo_concurrency.lockutils [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] Acquired lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.473370] env[62820]: DEBUG nova.network.neutron [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Refreshing network info cache for port c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1429.481246] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273be9f5-520f-488c-be6d-dc75178cbdf6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.507319] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acab29a4-0198-4afb-89a4-f53602a99896 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.527379] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] The volume has not been displaced from its original location: [datastore1] volume-763afde5-c692-44d0-a083-7f09ae379a22/volume-763afde5-c692-44d0-a083-7f09ae379a22.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1429.533850] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Reconfiguring VM instance instance-00000016 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1429.533850] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbf7963c-b399-4978-a561-451bcf8e7340 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.551659] env[62820]: DEBUG nova.network.neutron [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Updated VIF entry in instance network info cache for port 1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1429.552205] env[62820]: DEBUG nova.network.neutron [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Updating instance_info_cache with network_info: [{"id": "0b1a477f-8c32-4c99-892f-23f8332338e2", "address": "fa:16:3e:19:8c:b7", "network": {"id": "fa8ad356-4923-4df8-91fb-4a94668b2e08", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-157646262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b1a477f-8c", "ovs_interfaceid": "0b1a477f-8c32-4c99-892f-23f8332338e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f", "address": "fa:16:3e:40:d0:62", "network": {"id": "d148355a-8625-4ad0-b1e2-d51c0b4345a8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-574156154", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a88c41e-8f", "ovs_interfaceid": "1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.555537] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1429.555537] env[62820]: value = "task-1695417" [ 1429.555537] env[62820]: _type = "Task" [ 1429.555537] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.565211] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695417, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.640771] env[62820]: DEBUG nova.scheduler.client.report [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1429.776154] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695414, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.808151] env[62820]: DEBUG oslo_vmware.api [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695412, 'name': PowerOnVM_Task, 'duration_secs': 1.511009} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.808439] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.808643] env[62820]: INFO nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Took 10.56 seconds to spawn the instance on the hypervisor. [ 1429.808823] env[62820]: DEBUG nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1429.809622] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195f3f91-3cbc-49a6-ab7a-36054aab4809 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.889830] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695416, 'name': CreateVM_Task, 'duration_secs': 0.471423} completed successfully. 
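The scheduler report-client line above ("Inventory has not changed for provider 8a0693d4-... based on inventory data: {...}") reflects the usual skip-if-equal check before pushing inventory to Placement. A hypothetical sketch of that check, with push_inventory_to_placement standing in for the actual REST call:

# Only push an inventory update when the newly computed inventory differs
# from what was last reported for the provider; otherwise log and skip.
def maybe_update_inventory(provider_uuid, new_inventory, reported_cache,
                           push_inventory_to_placement):
    if reported_cache.get(provider_uuid) == new_inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    push_inventory_to_placement(provider_uuid, new_inventory)
    reported_cache[provider_uuid] = new_inventory
    return True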
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.889830] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1429.890530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.890700] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.891050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1429.891305] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcf9e9ec-0a3a-416e-868f-bc01d71160a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.896130] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1429.896130] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ac25be-75a4-5fa9-c804-83eb4d4326b1" [ 1429.896130] env[62820]: _type = "Task" [ 1429.896130] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.903927] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ac25be-75a4-5fa9-c804-83eb4d4326b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.055301] env[62820]: DEBUG oslo_concurrency.lockutils [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] Releasing lock "refresh_cache-262d0714-d7d7-443c-9927-ef03ba9f230e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.055551] env[62820]: DEBUG nova.compute.manager [req-b69bc192-ce1d-47ba-995b-7bd168d77d2d req-d206e76d-2ac8-495b-9122-18e09ca2ddfe service nova] [instance: 06fb6034-e010-49bd-9e5e-7699a43dd5a9] Received event network-vif-deleted-e926a317-6604-4398-a77f-420aaa352075 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1430.068323] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695417, 'name': ReconfigVM_Task, 'duration_secs': 0.257923} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.068632] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Reconfigured VM instance instance-00000016 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1430.078837] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-167d6447-bb26-490a-8217-68901da07172 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.095181] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1430.109401] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1430.109401] env[62820]: value = "task-1695418" [ 1430.109401] env[62820]: _type = "Task" [ 1430.109401] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.127986] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695418, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.144441] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1430.144769] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1430.144929] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 
tempest-ImagesTestJSON-108607288-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1430.149057] env[62820]: DEBUG nova.virt.hardware [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1430.150141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.154695] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889ad5cd-fc51-45ad-b80c-5f1ce2bb20f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.162394] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.678s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.162827] env[62820]: DEBUG nova.objects.instance [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lazy-loading 'resources' on Instance uuid 9068670d-f323-4180-92f9-f19737e955e2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1430.176306] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abddac7f-07a3-4db2-be72-7ad523616c76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.213432] env[62820]: INFO nova.scheduler.client.report [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Deleted allocations for instance cc2b0ed5-b711-487d-8bfc-ee2745c9ef89 [ 1430.278537] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695414, 'name': CreateVM_Task, 'duration_secs': 1.298349} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.282741] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1430.283957] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.326805] env[62820]: DEBUG nova.network.neutron [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Updated VIF entry in instance network info cache for port c83c67d0-648f-4a10-b8a2-7e83e079d0f3. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1430.326805] env[62820]: DEBUG nova.network.neutron [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Updating instance_info_cache with network_info: [{"id": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "address": "fa:16:3e:5e:98:88", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83c67d0-64", "ovs_interfaceid": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.332881] env[62820]: INFO nova.compute.manager [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Took 44.60 seconds to build instance. [ 1430.405907] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ac25be-75a4-5fa9-c804-83eb4d4326b1, 'name': SearchDatastore_Task, 'duration_secs': 0.016328} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.406229] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.406460] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1430.406719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.406822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.407086] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1430.407384] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.407696] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.407913] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57f126df-24b1-4b9e-800b-e90cf5a42837 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.410172] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b32ed6d-9ab1-4a29-9ca3-adaf48dfa8c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.414773] env[62820]: DEBUG oslo_vmware.api [None 
req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1430.414773] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5268449a-2983-5033-840e-973a3083e5c8" [ 1430.414773] env[62820]: _type = "Task" [ 1430.414773] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.420484] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1430.420660] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1430.421735] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-888ee439-b791-4dab-b08a-b9562a776070 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.426496] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5268449a-2983-5033-840e-973a3083e5c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.429209] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1430.429209] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e95cc9-6312-e014-6d7c-7984fded1534" [ 1430.429209] env[62820]: _type = "Task" [ 1430.429209] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.436644] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e95cc9-6312-e014-6d7c-7984fded1534, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.540162] env[62820]: DEBUG nova.compute.manager [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Received event network-changed {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1430.540362] env[62820]: DEBUG nova.compute.manager [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Refreshing instance network info cache due to event network-changed. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1430.540578] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] Acquiring lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.540735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] Acquired lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.540897] env[62820]: DEBUG nova.network.neutron [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.614534] env[62820]: DEBUG nova.network.neutron [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Successfully updated port: 7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1430.624829] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695418, 'name': ReconfigVM_Task, 'duration_secs': 0.181174} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.626419] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353396', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'name': 'volume-763afde5-c692-44d0-a083-7f09ae379a22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e4668ed-801a-4105-8b9e-cf37be91c8b8', 'attached_at': '', 'detached_at': '', 'volume_id': '763afde5-c692-44d0-a083-7f09ae379a22', 'serial': '763afde5-c692-44d0-a083-7f09ae379a22'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1430.626764] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.627539] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a54c53-021b-48cb-bc42-ac038fa53e4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.634598] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1430.634850] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bde3b193-a63e-44af-8602-0f89b7c47a8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.706663] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1430.706946] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1430.707238] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Deleting the datastore file [datastore1] 4e4668ed-801a-4105-8b9e-cf37be91c8b8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1430.707977] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4c03145-bc56-45e0-a256-d1649c6a6a33 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.718691] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for the task: (returnval){ [ 1430.718691] env[62820]: value = "task-1695421" [ 1430.718691] env[62820]: _type = "Task" [ 1430.718691] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.726503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dc273a73-b358-48e0-8205-7f8c076d6236 tempest-ServerRescueTestJSONUnderV235-269499849 tempest-ServerRescueTestJSONUnderV235-269499849-project-member] Lock "cc2b0ed5-b711-487d-8bfc-ee2745c9ef89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.549s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.731091] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.829650] env[62820]: DEBUG oslo_concurrency.lockutils [req-61217cf0-492a-4783-8885-131cb7c3fb83 req-cb049e11-d736-4486-9f37-1f43b333da95 service nova] Releasing lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.836367] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caffceb7-8a42-4351-8831-377cb63362c2 tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.452s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.926882] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5268449a-2983-5033-840e-973a3083e5c8, 'name': SearchDatastore_Task, 'duration_secs': 0.022073} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.929741] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.930156] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1430.930528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.943018] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e95cc9-6312-e014-6d7c-7984fded1534, 'name': SearchDatastore_Task, 'duration_secs': 0.039095} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.943018] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ad63924-5dd9-4aa5-8841-a99b797cfb9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.946698] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1430.946698] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524b1e57-29df-5eca-1ed1-4c0ebde3d51f" [ 1430.946698] env[62820]: _type = "Task" [ 1430.946698] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.958250] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524b1e57-29df-5eca-1ed1-4c0ebde3d51f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.117951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "refresh_cache-f186854d-3f0a-4512-83b9-2c946247ccbe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.118293] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "refresh_cache-f186854d-3f0a-4512-83b9-2c946247ccbe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.118293] env[62820]: DEBUG nova.network.neutron [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1431.120039] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63db2b48-2da5-47ee-bb7e-6f46b1c426ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.130028] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b38a6b-df17-4c8d-9638-13cf8c4fac4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.164544] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda2c41a-49f0-4877-ace4-91d9f7281822 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.168320] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "31639194-b0c4-4eb9-a6f4-e61b067c807f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.168320] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.168320] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "31639194-b0c4-4eb9-a6f4-e61b067c807f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.168320] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 
tempest-ServerExternalEventsTest-1210725869-project-member] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.168555] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.171629] env[62820]: INFO nova.compute.manager [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Terminating instance [ 1431.176851] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e33dbe-b92e-4d23-a77c-91bd1c98eb8a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.196675] env[62820]: DEBUG nova.compute.provider_tree [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1431.228904] env[62820]: DEBUG oslo_vmware.api [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Task: {'id': task-1695421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240115} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.229268] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.229489] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.229693] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.308805] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1431.309198] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caca1347-9270-40be-be9d-a0e77babf24b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.321018] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb98f49-218a-4e57-a3a1-573f73004f27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.359159] env[62820]: ERROR nova.compute.manager [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Failed to detach volume 763afde5-c692-44d0-a083-7f09ae379a22 from /dev/sda: nova.exception.InstanceNotFound: Instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 could not be found. 
[ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Traceback (most recent call last): [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self.driver.rebuild(**kwargs) [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] raise NotImplementedError() [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] NotImplementedError [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] During handling of the above exception, another exception occurred: [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Traceback (most recent call last): [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self.driver.detach_volume(context, old_connection_info, [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] return self._volumeops.detach_volume(connection_info, instance) [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self._detach_volume_vmdk(connection_info, instance) [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] stable_ref.fetch_moref(session) [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] 
nova.exception.InstanceNotFound: Instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 could not be found. [ 1431.359159] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.384346] env[62820]: DEBUG nova.network.neutron [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Updating instance_info_cache with network_info: [{"id": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "address": "fa:16:3e:36:34:09", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1da745d0-cf", "ovs_interfaceid": "1da745d0-cf16-4aea-b62a-dcdc1c42e0c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.463595] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524b1e57-29df-5eca-1ed1-4c0ebde3d51f, 'name': SearchDatastore_Task, 'duration_secs': 0.031351} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.463867] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.464148] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 706d42cd-53d9-4976-bc67-98816a40fff4/706d42cd-53d9-4976-bc67-98816a40fff4.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1431.464472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.464664] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.464883] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fade8eb1-4e28-45a8-8143-2e47371561a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.469234] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c653991c-30e3-4bf9-9a71-ef103ed25cf1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.474613] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1431.474613] env[62820]: value = "task-1695422" [ 1431.474613] env[62820]: _type = "Task" [ 1431.474613] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.482243] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.482243] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1431.483099] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61dd5a4f-f5f8-4ccc-81e9-4d6cfcf914b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.488726] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695422, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.491620] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1431.491620] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ea23a0-fdc8-1e96-cdaa-71c10f4f844c" [ 1431.491620] env[62820]: _type = "Task" [ 1431.491620] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.499842] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ea23a0-fdc8-1e96-cdaa-71c10f4f844c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.523657] env[62820]: DEBUG nova.compute.utils [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Build of instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 aborted: Failed to rebuild volume backed instance. {{(pid=62820) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1431.527527] env[62820]: ERROR nova.compute.manager [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 aborted: Failed to rebuild volume backed instance. 
[ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Traceback (most recent call last): [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self.driver.rebuild(**kwargs) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] raise NotImplementedError() [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] NotImplementedError [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] During handling of the above exception, another exception occurred: [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Traceback (most recent call last): [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self._detach_root_volume(context, instance, root_bdm) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] with excutils.save_and_reraise_exception(): [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self.force_reraise() [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] raise self.value [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self.driver.detach_volume(context, old_connection_info, [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] return self._volumeops.detach_volume(connection_info, instance) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self._detach_volume_vmdk(connection_info, instance) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] stable_ref.fetch_moref(session) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] nova.exception.InstanceNotFound: Instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 could not be found. [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] During handling of the above exception, another exception occurred: [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Traceback (most recent call last): [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 11223, in _error_out_instance_on_exception [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] yield [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1431.527527] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self._do_rebuild_instance_with_claim( [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self._do_rebuild_instance( [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] self._rebuild_default_impl(**kwargs) [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] 
self._rebuild_volume_backed_instance( [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] raise exception.BuildAbortException( [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] nova.exception.BuildAbortException: Build of instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 aborted: Failed to rebuild volume backed instance. [ 1431.528754] env[62820]: ERROR nova.compute.manager [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] [ 1431.548500] env[62820]: DEBUG nova.compute.manager [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Received event network-vif-plugged-7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1431.548717] env[62820]: DEBUG oslo_concurrency.lockutils [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] Acquiring lock "f186854d-3f0a-4512-83b9-2c946247ccbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.548928] env[62820]: DEBUG oslo_concurrency.lockutils [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.549115] env[62820]: DEBUG oslo_concurrency.lockutils [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.549288] env[62820]: DEBUG nova.compute.manager [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] No waiting events found dispatching network-vif-plugged-7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1431.549458] env[62820]: WARNING nova.compute.manager [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Received unexpected event network-vif-plugged-7aa70f31-5a35-418a-a31b-0258e18a6cf7 for instance with vm_state building and task_state spawning. 
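The traceback above shows the failure pattern in this rebuild: the VMware driver does not implement rebuild() (NotImplementedError), so the compute manager falls back to the default rebuild path; for a volume-backed instance that path tries to detach the root volume, the lookup of the backing VM raises InstanceNotFound, oslo.utils' save_and_reraise_exception() re-raises it after the cleanup block, and one frame up it is converted into BuildAbortException. The following is a minimal, hedged sketch of that error-handling pattern only, not Nova's actual code: the exception classes and function names are simplified stand-ins, and it assumes oslo.utils is installed; only excutils.save_and_reraise_exception() is a real API taken from the traceback.

    # Sketch of the save_and_reraise_exception pattern visible in the traceback.
    # Assumes: pip install oslo.utils. Exception classes below are stand-ins for
    # nova.exception.InstanceNotFound / BuildAbortException, not Nova's code.
    from oslo_utils import excutils


    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""


    class BuildAbortException(Exception):
        """Stand-in for nova.exception.BuildAbortException."""


    def detach_root_volume(instance_uuid):
        """Try to detach; run cleanup on failure, then re-raise the original error."""
        try:
            # In the log, the driver cannot find the backing VM by UUID here.
            raise InstanceNotFound("Instance %s could not be found." % instance_uuid)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup/logging happens here; on leaving the with-block the saved
                # exception is re-raised (force_reraise -> raise self.value).
                print("detach failed, re-raising original error")


    def rebuild_volume_backed_instance(instance_uuid):
        """Wrap any detach failure in a build-abort error, as the manager does."""
        try:
            detach_root_volume(instance_uuid)
        except Exception:
            raise BuildAbortException(
                "Build of instance %s aborted: Failed to rebuild volume backed "
                "instance." % instance_uuid)


    if __name__ == "__main__":
        try:
            rebuild_volume_backed_instance("4e4668ed-801a-4105-8b9e-cf37be91c8b8")
        except BuildAbortException as exc:
            print(exc)

Note that save_and_reraise_exception() also exposes a reraise flag (ctxt.reraise = False) for the cases where the cleanup code decides to swallow the original error; in the path logged here it is left at its default, which is why InstanceNotFound propagates and the rebuild is aborted.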
[ 1431.549667] env[62820]: DEBUG nova.compute.manager [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Received event network-changed-7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1431.549771] env[62820]: DEBUG nova.compute.manager [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Refreshing instance network info cache due to event network-changed-7aa70f31-5a35-418a-a31b-0258e18a6cf7. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1431.549985] env[62820]: DEBUG oslo_concurrency.lockutils [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] Acquiring lock "refresh_cache-f186854d-3f0a-4512-83b9-2c946247ccbe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.680546] env[62820]: DEBUG nova.network.neutron [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1431.685845] env[62820]: DEBUG nova.compute.manager [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1431.686188] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1431.686972] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10cafec-37e6-431f-91de-f50273505a86 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.695356] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1431.695601] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e7112fc-637a-4269-9d9c-2e3abb32598d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.708390] env[62820]: DEBUG oslo_vmware.api [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: (returnval){ [ 1431.708390] env[62820]: value = "task-1695423" [ 1431.708390] env[62820]: _type = "Task" [ 1431.708390] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.719018] env[62820]: DEBUG oslo_vmware.api [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.724725] env[62820]: ERROR nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] [req-c737524b-110c-4670-80c5-488a2d3dbc16] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c737524b-110c-4670-80c5-488a2d3dbc16"}]} [ 1431.742931] env[62820]: DEBUG nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1431.762557] env[62820]: DEBUG nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1431.762806] env[62820]: DEBUG nova.compute.provider_tree [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1431.779444] env[62820]: DEBUG nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 
tempest-DeleteServersAdminTestJSON-412807917-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1431.803240] env[62820]: DEBUG nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1431.872724] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.873019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.873229] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "90ea0c16-739a-4132-ac36-e154a846b9c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.873418] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.873586] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.876094] env[62820]: INFO nova.compute.manager [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Terminating instance [ 1431.887368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1062be3-65bd-4cc0-9f1f-1ac6debda923 
tempest-ServerExternalEventsTest-783142580 tempest-ServerExternalEventsTest-783142580-project] Releasing lock "refresh_cache-31639194-b0c4-4eb9-a6f4-e61b067c807f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.901955] env[62820]: DEBUG nova.network.neutron [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Updating instance_info_cache with network_info: [{"id": "7aa70f31-5a35-418a-a31b-0258e18a6cf7", "address": "fa:16:3e:7a:7d:eb", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aa70f31-5a", "ovs_interfaceid": "7aa70f31-5a35-418a-a31b-0258e18a6cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.991620] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695422, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.012675] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ea23a0-fdc8-1e96-cdaa-71c10f4f844c, 'name': SearchDatastore_Task, 'duration_secs': 0.039487} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.013859] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f07f82a-4681-4c67-848b-2ee24b517a5a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.024021] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1432.024021] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b78dd1-9746-6755-5e4c-1d07b5311e35" [ 1432.024021] env[62820]: _type = "Task" [ 1432.024021] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.037530] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b78dd1-9746-6755-5e4c-1d07b5311e35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.226479] env[62820]: DEBUG oslo_vmware.api [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695423, 'name': PowerOffVM_Task, 'duration_secs': 0.228935} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.231374] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1432.231374] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1432.231673] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed4d93b8-814a-42b9-bf94-850989039126 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.314220] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1432.315070] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1432.315070] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Deleting the datastore file [datastore1] 31639194-b0c4-4eb9-a6f4-e61b067c807f {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1432.315070] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f8c7d22-94ad-4fa9-9c63-5b8016ea5fea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.324519] env[62820]: DEBUG oslo_vmware.api [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for the task: 
(returnval){ [ 1432.324519] env[62820]: value = "task-1695425" [ 1432.324519] env[62820]: _type = "Task" [ 1432.324519] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.333855] env[62820]: DEBUG oslo_vmware.api [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.346745] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1403f05-354d-4000-bd1c-030bd8f7cc1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.357999] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3fe632-f00c-47c4-ba12-3d55db7043db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.399126] env[62820]: DEBUG nova.compute.manager [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1432.399353] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1432.400402] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d0f37e-23eb-4b49-a7f7-9be96b13d22a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.403613] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c6cd19-c2c3-40e2-b7f1-870cc90070c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.409115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "refresh_cache-f186854d-3f0a-4512-83b9-2c946247ccbe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.409115] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Instance network_info: |[{"id": "7aa70f31-5a35-418a-a31b-0258e18a6cf7", "address": "fa:16:3e:7a:7d:eb", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aa70f31-5a", "ovs_interfaceid": "7aa70f31-5a35-418a-a31b-0258e18a6cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1432.409115] env[62820]: DEBUG oslo_concurrency.lockutils [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] Acquired lock "refresh_cache-f186854d-3f0a-4512-83b9-2c946247ccbe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.409115] env[62820]: DEBUG nova.network.neutron [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Refreshing network info cache for port 7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1432.409115] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:7d:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7aa70f31-5a35-418a-a31b-0258e18a6cf7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1432.416546] env[62820]: DEBUG oslo.service.loopingcall [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.420156] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1432.420795] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1432.423061] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c65d466-eec9-48c4-8fc9-81403f10f3bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.438644] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29693922-ce6e-4839-8cbd-19971d6d11d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.442042] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344f8bbf-051c-4cb7-8fdc-0ef6ef70c5ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.465220] env[62820]: DEBUG nova.compute.provider_tree [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1432.465220] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.465220] env[62820]: value = "task-1695428" [ 1432.465220] env[62820]: _type = "Task" [ 1432.465220] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.465501] env[62820]: DEBUG oslo_vmware.api [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1432.465501] env[62820]: value = "task-1695427" [ 1432.465501] env[62820]: _type = "Task" [ 1432.465501] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.481099] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695428, 'name': CreateVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.487922] env[62820]: DEBUG oslo_vmware.api [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695427, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.497724] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.813595} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.498538] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 706d42cd-53d9-4976-bc67-98816a40fff4/706d42cd-53d9-4976-bc67-98816a40fff4.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1432.498538] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1432.500611] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65dc1dd6-e119-4de0-9a64-79bb00043af7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.507723] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1432.507723] env[62820]: value = "task-1695429" [ 1432.507723] env[62820]: _type = "Task" [ 1432.507723] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.517191] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695429, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.536117] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b78dd1-9746-6755-5e4c-1d07b5311e35, 'name': SearchDatastore_Task, 'duration_secs': 0.049978} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.536573] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.537186] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 262d0714-d7d7-443c-9927-ef03ba9f230e/262d0714-d7d7-443c-9927-ef03ba9f230e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.537780] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53fd1e25-8b05-4e3f-8551-a0cfce1c0c45 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.548045] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1432.548045] env[62820]: value = "task-1695430" [ 1432.548045] env[62820]: _type = "Task" [ 1432.548045] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.559782] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.836496] env[62820]: DEBUG oslo_vmware.api [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Task: {'id': task-1695425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199013} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.836712] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.836875] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1432.837064] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1432.837243] env[62820]: INFO nova.compute.manager [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1432.837524] env[62820]: DEBUG oslo.service.loopingcall [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.839254] env[62820]: DEBUG nova.compute.manager [-] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1432.839254] env[62820]: DEBUG nova.network.neutron [-] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1432.988924] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695428, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.992882] env[62820]: DEBUG oslo_vmware.api [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695427, 'name': PowerOffVM_Task, 'duration_secs': 0.224037} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.993177] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1432.993347] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1432.993660] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-caab35e5-f488-4178-be6f-63572880b8d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.008427] env[62820]: DEBUG nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1433.008708] env[62820]: DEBUG nova.compute.provider_tree [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 62 to 63 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1433.008884] env[62820]: DEBUG nova.compute.provider_tree [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1433.030550] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103479} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.030832] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1433.031693] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c8a888-9049-46a2-a539-7ca5d89bdb5c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.062480] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 706d42cd-53d9-4976-bc67-98816a40fff4/706d42cd-53d9-4976-bc67-98816a40fff4.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1433.070807] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0252ab9a-bad0-4a6e-a5e4-895a0e72ccbb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.097166] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695430, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.097511] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1433.097511] env[62820]: value = "task-1695432" [ 1433.097511] env[62820]: _type = "Task" [ 1433.097511] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.110690] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695432, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.185990] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1433.186427] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1433.186829] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Deleting the datastore file [datastore1] 90ea0c16-739a-4132-ac36-e154a846b9c2 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1433.187365] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75520213-bd83-4794-9706-cfe2245c0c34 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.196650] env[62820]: DEBUG oslo_vmware.api [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for the task: (returnval){ [ 1433.196650] env[62820]: value = "task-1695433" [ 1433.196650] env[62820]: _type = "Task" [ 1433.196650] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.206680] env[62820]: DEBUG oslo_vmware.api [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.331215] env[62820]: DEBUG nova.network.neutron [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Updated VIF entry in instance network info cache for port 7aa70f31-5a35-418a-a31b-0258e18a6cf7. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1433.331574] env[62820]: DEBUG nova.network.neutron [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Updating instance_info_cache with network_info: [{"id": "7aa70f31-5a35-418a-a31b-0258e18a6cf7", "address": "fa:16:3e:7a:7d:eb", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aa70f31-5a", "ovs_interfaceid": "7aa70f31-5a35-418a-a31b-0258e18a6cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.486648] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695428, 'name': CreateVM_Task, 'duration_secs': 0.646456} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.486648] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.488225] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.488225] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.488225] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.488337] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a214482-cb41-4a47-84da-853befd2afcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.495616] env[62820]: 
DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1433.495616] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52271a5e-74ca-ef90-eaa6-83bfc17178b4" [ 1433.495616] env[62820]: _type = "Task" [ 1433.495616] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.504852] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52271a5e-74ca-ef90-eaa6-83bfc17178b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.522976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.361s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.525532] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 30.922s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.525773] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.526055] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1433.526399] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.574s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.526621] env[62820]: DEBUG nova.objects.instance [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1433.530121] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af929762-8b3b-40aa-b2d8-3572221220b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.540598] env[62820]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8114508-21b3-4b39-a8bc-f44a7b3189ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.557299] env[62820]: INFO nova.scheduler.client.report [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Deleted allocations for instance 9068670d-f323-4180-92f9-f19737e955e2 [ 1433.563107] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28ff2e7-2f0f-4389-9b9a-961ed743e676 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.568633] env[62820]: DEBUG oslo_concurrency.lockutils [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.578384] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695430, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.579443] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13dd4ce-f587-40fc-bb4a-53f80010a1a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.615250] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179184MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1433.615514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.621345] env[62820]: DEBUG nova.compute.manager [req-1a87fd1f-70f9-4cf6-b1b2-b9a712718eba req-7e56920d-21ed-42fa-9b8e-406fbecb92ca service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Received event network-vif-deleted-1da745d0-cf16-4aea-b62a-dcdc1c42e0c8 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1433.621587] env[62820]: INFO nova.compute.manager [req-1a87fd1f-70f9-4cf6-b1b2-b9a712718eba req-7e56920d-21ed-42fa-9b8e-406fbecb92ca service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Neutron deleted interface 1da745d0-cf16-4aea-b62a-dcdc1c42e0c8; detaching it from the instance and deleting it from the info cache [ 1433.621811] env[62820]: DEBUG nova.network.neutron [req-1a87fd1f-70f9-4cf6-b1b2-b9a712718eba req-7e56920d-21ed-42fa-9b8e-406fbecb92ca service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1433.630938] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.708296] env[62820]: DEBUG oslo_vmware.api [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Task: {'id': task-1695433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197193} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.708564] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1433.708782] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1433.709015] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1433.709292] env[62820]: INFO nova.compute.manager [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1433.709579] env[62820]: DEBUG oslo.service.loopingcall [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1433.709799] env[62820]: DEBUG nova.compute.manager [-] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1433.710337] env[62820]: DEBUG nova.network.neutron [-] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1433.805672] env[62820]: DEBUG nova.network.neutron [-] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.835380] env[62820]: DEBUG oslo_concurrency.lockutils [req-ee2b5872-6ebd-438d-91b8-afcb618f813f req-bed1980e-18e2-4fcb-a504-95d988426b05 service nova] Releasing lock "refresh_cache-f186854d-3f0a-4512-83b9-2c946247ccbe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.011139] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52271a5e-74ca-ef90-eaa6-83bfc17178b4, 'name': SearchDatastore_Task, 'duration_secs': 0.036266} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.011816] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.012392] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1434.015028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.015028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.015028] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1434.015028] 
env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e339cb2-e0b1-451c-b434-5f0a47d2eeda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.033027] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1434.033027] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1434.033027] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df1a674c-b4d1-4b7a-85eb-180e31fa674a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.042670] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1434.042670] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525cba73-ebb3-483c-551f-8d1ceda0c9a2" [ 1434.042670] env[62820]: _type = "Task" [ 1434.042670] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.052941] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525cba73-ebb3-483c-551f-8d1ceda0c9a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.065344] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695430, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.073418] env[62820]: DEBUG oslo_concurrency.lockutils [None req-420c996c-f218-4497-a9f2-719da0e23b95 tempest-DeleteServersAdminTestJSON-412807917 tempest-DeleteServersAdminTestJSON-412807917-project-member] Lock "9068670d-f323-4180-92f9-f19737e955e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.080s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.128436] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695432, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.128872] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35b91754-1ba3-49fe-aa17-2b64238e5af4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.138806] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ef9baf-f28b-4fab-a77d-20672db6c479 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.179708] env[62820]: DEBUG nova.compute.manager [req-1a87fd1f-70f9-4cf6-b1b2-b9a712718eba req-7e56920d-21ed-42fa-9b8e-406fbecb92ca service nova] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Detach interface failed, port_id=1da745d0-cf16-4aea-b62a-dcdc1c42e0c8, reason: Instance 31639194-b0c4-4eb9-a6f4-e61b067c807f could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1434.308579] env[62820]: INFO nova.compute.manager [-] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Took 1.47 seconds to deallocate network for instance. [ 1434.543259] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bb1e0d6b-2b4c-4f38-a182-bc4bcea6a2d3 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.543259] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.215s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.543259] env[62820]: DEBUG nova.objects.instance [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'resources' on Instance uuid fdc57b8b-a6ab-4e6d-9db0-4054b022aeec {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1434.556014] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525cba73-ebb3-483c-551f-8d1ceda0c9a2, 'name': SearchDatastore_Task, 'duration_secs': 0.04491} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.559021] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddd6731c-253d-4665-be85-31687bec830d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.567980] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1434.567980] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52dd2371-a998-ccaf-9e33-981c6b4334c0" [ 1434.567980] env[62820]: _type = "Task" [ 1434.567980] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.572233] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695430, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.661867} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.575581] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 262d0714-d7d7-443c-9927-ef03ba9f230e/262d0714-d7d7-443c-9927-ef03ba9f230e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1434.576037] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1434.576570] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5c2afc5-9d7d-4c43-a9d0-6928c3ee1fda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.585659] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52dd2371-a998-ccaf-9e33-981c6b4334c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011458} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.587374] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.588248] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] f186854d-3f0a-4512-83b9-2c946247ccbe/f186854d-3f0a-4512-83b9-2c946247ccbe.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1434.588772] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1434.588772] env[62820]: value = "task-1695435" [ 1434.588772] env[62820]: _type = "Task" [ 1434.588772] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.592081] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c1bcc1d-d7a8-492b-9e88-b0c75a7c002d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.604350] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695435, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.605566] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1434.605566] env[62820]: value = "task-1695436" [ 1434.605566] env[62820]: _type = "Task" [ 1434.605566] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.614554] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.625963] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695432, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.815129] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.049991] env[62820]: DEBUG nova.compute.manager [req-310947a1-6f55-49a5-86e2-8186aa634222 req-a2954d40-7dbf-46dd-9fed-4c0ef77afbc6 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Received event network-vif-deleted-262a6e93-a27f-4189-9a88-cb1c5fe97709 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1435.050377] env[62820]: INFO nova.compute.manager [req-310947a1-6f55-49a5-86e2-8186aa634222 req-a2954d40-7dbf-46dd-9fed-4c0ef77afbc6 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Neutron deleted interface 262a6e93-a27f-4189-9a88-cb1c5fe97709; detaching it from the instance and deleting it from the info cache [ 1435.050518] env[62820]: DEBUG nova.network.neutron [req-310947a1-6f55-49a5-86e2-8186aa634222 req-a2954d40-7dbf-46dd-9fed-4c0ef77afbc6 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.105947] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080145} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.109151] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1435.113402] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b2a4fd-4687-4a96-a15a-e182578a0257 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.124012] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695436, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.144726] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 262d0714-d7d7-443c-9927-ef03ba9f230e/262d0714-d7d7-443c-9927-ef03ba9f230e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.150547] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50351899-ca75-4c93-8f03-2252fa8b48b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.165024] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695432, 'name': ReconfigVM_Task, 'duration_secs': 1.788139} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.167638] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 706d42cd-53d9-4976-bc67-98816a40fff4/706d42cd-53d9-4976-bc67-98816a40fff4.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.167638] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c85ac4b-48c7-4e10-aa44-1432db135a9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.170219] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.170559] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.170946] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.171277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 
tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.171574] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.175594] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1435.175594] env[62820]: value = "task-1695438" [ 1435.175594] env[62820]: _type = "Task" [ 1435.175594] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.175894] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1435.175894] env[62820]: value = "task-1695437" [ 1435.175894] env[62820]: _type = "Task" [ 1435.175894] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.176707] env[62820]: INFO nova.compute.manager [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Terminating instance [ 1435.202409] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695438, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.203093] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695437, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.254250] env[62820]: DEBUG nova.network.neutron [-] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.530556] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e37068-5173-4d7c-b525-d3f7e6e7a2b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.538395] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c18ed19-1d69-4750-a680-4e766415ab69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.569929] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1905559-557b-43e6-8a43-63685878d0f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.572374] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dd2ae1-225a-40b5-b828-6a282dea4ae0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.585235] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7306ff76-cb41-43d5-8b5d-c8e50c65958b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.592991] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c72c10-6459-4879-a16e-d0c24ae8761b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.614388] env[62820]: DEBUG nova.compute.provider_tree [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.634277] env[62820]: DEBUG nova.compute.manager [req-310947a1-6f55-49a5-86e2-8186aa634222 req-a2954d40-7dbf-46dd-9fed-4c0ef77afbc6 service nova] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Detach interface failed, port_id=262a6e93-a27f-4189-9a88-cb1c5fe97709, reason: Instance 90ea0c16-739a-4132-ac36-e154a846b9c2 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1435.642520] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627043} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.642808] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] f186854d-3f0a-4512-83b9-2c946247ccbe/f186854d-3f0a-4512-83b9-2c946247ccbe.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.643032] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.643283] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee064a57-28bb-47f4-835b-61f973f55da0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.650576] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1435.650576] env[62820]: value = "task-1695439" [ 1435.650576] env[62820]: _type = "Task" [ 1435.650576] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.662191] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.694900] env[62820]: DEBUG nova.compute.manager [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1435.694900] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695437, 'name': ReconfigVM_Task, 'duration_secs': 0.381152} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.697483] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19442222-db46-433a-b96e-7c5065a8bc48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.699885] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 262d0714-d7d7-443c-9927-ef03ba9f230e/262d0714-d7d7-443c-9927-ef03ba9f230e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.700725] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695438, 'name': Rename_Task, 'duration_secs': 0.26358} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.700971] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ebd107e-19d9-4166-9fcf-f7e12cae4739 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.702536] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1435.703596] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e805710b-c623-49b8-879c-afd7640e360c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.712349] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076f20da-a23b-4fbe-8991-3989ee837384 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.726860] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1435.726860] env[62820]: value = "task-1695440" [ 1435.726860] env[62820]: _type = "Task" [ 1435.726860] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.727037] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1435.727037] env[62820]: value = "task-1695441" [ 1435.727037] env[62820]: _type = "Task" [ 1435.727037] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.743282] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695441, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.762012] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695440, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.762509] env[62820]: INFO nova.compute.manager [-] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Took 2.05 seconds to deallocate network for instance. [ 1435.763369] env[62820]: WARNING nova.virt.vmwareapi.driver [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 could not be found. [ 1435.763710] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1435.765943] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55e8a301-d43b-4186-a38c-ff73872fb98c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.780746] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4db983-8c97-4eb0-8078-1999a9781f81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.819309] env[62820]: WARNING nova.virt.vmwareapi.vmops [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4e4668ed-801a-4105-8b9e-cf37be91c8b8 could not be found. [ 1435.819553] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1435.819745] env[62820]: INFO nova.compute.manager [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Took 0.13 seconds to destroy the instance on the hypervisor. 
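Annotation: the recurring wait_for_task / "Task: {'id': ..., 'name': ...} progress is N%" entries in this trace all come from the same polling pattern: a vCenter task is submitted, then its state and progress are read repeatedly until it reports success or error. The following is a minimal plain-Python sketch of that loop for illustration only; get_task_info() and the state strings are assumptions supplied by the caller, not the oslo.vmware implementation that actually produces these log lines.

# Minimal sketch of the task-polling loop behind the wait_for_task /
# "progress is N%" records above. get_task_info() is a hypothetical accessor
# passed in by the caller; the real driver delegates this to oslo.vmware.
import time


def wait_for_task(task, get_task_info, interval=0.5, timeout=300.0):
    """Poll a task handle until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task)  # e.g. {'state': ..., 'progress': ..., 'result': ...}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # Mirrors the "_poll_task ... progress is N%" lines in the trace.
        print("Task %s progress is %s%%" % (task, info.get("progress", 0)))
        time.sleep(interval)
    raise TimeoutError("task %s did not complete within %ss" % (task, timeout))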
[ 1435.820019] env[62820]: DEBUG oslo.service.loopingcall [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1435.821701] env[62820]: DEBUG nova.compute.manager [-] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1435.821808] env[62820]: DEBUG nova.network.neutron [-] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1436.135958] env[62820]: DEBUG nova.scheduler.client.report [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1436.169828] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078659} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.170141] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1436.170999] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e3d0fa-3892-4b30-9fe3-fbf1a334c117 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.198698] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] f186854d-3f0a-4512-83b9-2c946247ccbe/f186854d-3f0a-4512-83b9-2c946247ccbe.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1436.199074] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a37156f2-d68e-406f-b0e2-bdaa4da33d8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.222340] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1436.222340] env[62820]: value = "task-1695442" [ 1436.222340] env[62820]: _type = "Task" [ 1436.222340] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.242218] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695442, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.253742] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695441, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.254366] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695440, 'name': Rename_Task, 'duration_secs': 0.181839} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.255448] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.255776] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30beed04-e4e0-4439-ac9c-8c116d81c9b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.270287] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1436.270287] env[62820]: value = "task-1695443" [ 1436.270287] env[62820]: _type = "Task" [ 1436.270287] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.274248] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.280729] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695443, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.641907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.100s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.647048] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.218s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.647048] env[62820]: DEBUG nova.objects.instance [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lazy-loading 'resources' on Instance uuid 9287b8eb-487d-4f51-9e7c-90c016a1c8e2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1436.677246] env[62820]: INFO nova.scheduler.client.report [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocations for instance fdc57b8b-a6ab-4e6d-9db0-4054b022aeec [ 1436.738082] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695442, 'name': ReconfigVM_Task, 'duration_secs': 0.436431} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.738895] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Reconfigured VM instance instance-00000027 to attach disk [datastore1] f186854d-3f0a-4512-83b9-2c946247ccbe/f186854d-3f0a-4512-83b9-2c946247ccbe.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.739492] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-add2fee3-319d-4ab4-ae94-905d4d13a0dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.749810] env[62820]: DEBUG oslo_vmware.api [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695441, 'name': PowerOnVM_Task, 'duration_secs': 0.580657} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.750821] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1436.751092] env[62820]: INFO nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Took 10.16 seconds to spawn the instance on the hypervisor. [ 1436.751414] env[62820]: DEBUG nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1436.752830] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf3872a-3305-4ecf-bdf4-0772aa0ad74d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.758429] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1436.758429] env[62820]: value = "task-1695445" [ 1436.758429] env[62820]: _type = "Task" [ 1436.758429] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.773355] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695445, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.792575] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695443, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.930997] env[62820]: DEBUG nova.network.neutron [-] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.175040] env[62820]: DEBUG nova.compute.manager [req-38ca2384-28c1-4ad5-b086-1fe6ff1bfed3 req-070aa905-a52f-4cdc-958a-b6a7b7e9a179 service nova] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Received event network-vif-deleted-5d1e82ae-c035-4664-9764-24afac8896b1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1437.186294] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a96b5b4-b035-4961-bd40-ea9595fd9155 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "fdc57b8b-a6ab-4e6d-9db0-4054b022aeec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.293s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.270299] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695445, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.289351] env[62820]: DEBUG oslo_vmware.api [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695443, 'name': PowerOnVM_Task, 'duration_secs': 0.585822} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.289830] env[62820]: INFO nova.compute.manager [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Took 38.33 seconds to build instance. [ 1437.291381] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.291381] env[62820]: INFO nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Took 15.45 seconds to spawn the instance on the hypervisor. 
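Annotation: the spawn path traced above (instances 262d0714-d7d7-443c-9927-ef03ba9f230e and f186854d-3f0a-4512-83b9-2c946247ccbe) runs the same disk sequence each time: copy the cached image VMDK into the instance folder, extend the root disk, reconfigure the VM to attach the disk, rename, and power on. The sketch below only makes that ordering explicit; every helper callable in it is a hypothetical stand-in for the vCenter task logged at that step, not the nova.virt.vmwareapi code.

# Minimal sketch of the disk workflow logged above for a spawning instance:
# copy the cached VMDK, extend it to the root-disk size, attach it, power on.
# All entries in `ops` are hypothetical callables provided by the caller.
def prepare_and_boot(instance_uuid, cache_vmdk, root_size_kb, ops):
    """Run the copy -> extend -> attach -> rename -> power-on sequence."""
    instance_vmdk = "[datastore1] %s/%s.vmdk" % (instance_uuid, instance_uuid)

    ops["copy_virtual_disk"](cache_vmdk, instance_vmdk)        # CopyVirtualDisk_Task
    ops["extend_virtual_disk"](instance_vmdk, root_size_kb)    # ExtendVirtualDisk_Task
    ops["reconfig_attach_disk"](instance_uuid, instance_vmdk)  # ReconfigVM_Task
    ops["rename_vm"](instance_uuid)                            # Rename_Task
    ops["power_on_vm"](instance_uuid)                          # PowerOnVM_Task
    return instance_vmdk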
[ 1437.291514] env[62820]: DEBUG nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1437.292526] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7ae620-2e9e-48f0-8834-e3e67fd37276 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.434985] env[62820]: INFO nova.compute.manager [-] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Took 1.61 seconds to deallocate network for instance. [ 1437.444841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] Acquiring lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.444841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] Acquired lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.444841] env[62820]: DEBUG nova.network.neutron [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.656963] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d80a130-3f17-492a-b0f0-06ef69adff94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.668228] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09aea618-6084-49f3-bd95-ff62b62cdafb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.705109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5799c3dc-30dd-4bf5-b6a5-8f19b056bf76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.714377] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4da767-da07-4ccd-ba84-8199467b5729 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.730409] env[62820]: DEBUG nova.compute.provider_tree [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.775761] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': 
task-1695445, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.799608] env[62820]: DEBUG oslo_concurrency.lockutils [None req-90c6604d-fa2a-4efb-8b63-bee591029c5a tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "706d42cd-53d9-4976-bc67-98816a40fff4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.054s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.822927] env[62820]: INFO nova.compute.manager [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Took 49.05 seconds to build instance. [ 1437.994200] env[62820]: INFO nova.compute.manager [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Took 0.56 seconds to detach 1 volumes for instance. [ 1437.999180] env[62820]: DEBUG nova.compute.manager [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Deleting volume: 763afde5-c692-44d0-a083-7f09ae379a22 {{(pid=62820) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1438.234352] env[62820]: DEBUG nova.scheduler.client.report [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1438.250499] env[62820]: DEBUG nova.network.neutron [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Updating instance_info_cache with network_info: [{"id": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "address": "fa:16:3e:5e:98:88", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83c67d0-64", "ovs_interfaceid": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.273920] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695445, 'name': Rename_Task, 'duration_secs': 1.238773} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.274874] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1438.275154] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b781570-9623-4a3a-a083-7cdc3ed16884 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.284015] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1438.284015] env[62820]: value = "task-1695447" [ 1438.284015] env[62820]: _type = "Task" [ 1438.284015] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.295601] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695447, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.325332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d1175fef-a6df-4b03-9233-5ccdc65b82bc tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.174s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.526366] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.526366] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.566470] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.745808] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.748482] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.268s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.750472] env[62820]: INFO nova.compute.claims [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1438.755536] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] Releasing lock "refresh_cache-706d42cd-53d9-4976-bc67-98816a40fff4" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.756498] env[62820]: DEBUG 
nova.compute.manager [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Inject network info {{(pid=62820) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7570}} [ 1438.756498] env[62820]: DEBUG nova.compute.manager [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] network_info to inject: |[{"id": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "address": "fa:16:3e:5e:98:88", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83c67d0-64", "ovs_interfaceid": "c83c67d0-648f-4a10-b8a2-7e83e079d0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7571}} [ 1438.763319] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Reconfiguring VM instance to set the machine id {{(pid=62820) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1438.763688] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-159fb18c-8f3b-487d-8bab-733123340025 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.779345] env[62820]: INFO nova.scheduler.client.report [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Deleted allocations for instance 9287b8eb-487d-4f51-9e7c-90c016a1c8e2 [ 1438.788937] env[62820]: DEBUG oslo_vmware.api [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] Waiting for the task: (returnval){ [ 1438.788937] env[62820]: value = "task-1695449" [ 1438.788937] env[62820]: _type = "Task" [ 1438.788937] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.811734] env[62820]: DEBUG oslo_vmware.api [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] Task: {'id': task-1695449, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.812433] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695447, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.961761] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "262d0714-d7d7-443c-9927-ef03ba9f230e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.962233] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.962327] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.962457] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.963202] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.966893] env[62820]: INFO nova.compute.manager [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Terminating instance [ 1439.028590] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1439.300802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63c68c1e-0d2a-4661-b0cf-fa9da4a5d9fc tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9287b8eb-487d-4f51-9e7c-90c016a1c8e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.685s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.313966] env[62820]: DEBUG oslo_vmware.api [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695447, 'name': PowerOnVM_Task, 'duration_secs': 0.824028} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.319158] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1439.319158] env[62820]: INFO nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Took 9.22 seconds to spawn the instance on the hypervisor. [ 1439.319158] env[62820]: DEBUG nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1439.319158] env[62820]: DEBUG oslo_vmware.api [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] Task: {'id': task-1695449, 'name': ReconfigVM_Task, 'duration_secs': 0.210566} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.320569] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410ca79c-77ca-4caa-b89f-0ff8f4c73712 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.108691] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c46641-1f5a-4eec-b9ab-50caffd7c592 tempest-ServersAdminTestJSON-2099223399 tempest-ServersAdminTestJSON-2099223399-project-admin] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Reconfigured VM instance to set the machine id {{(pid=62820) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1440.111817] env[62820]: DEBUG nova.compute.manager [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1440.111817] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1440.123195] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039b7011-0b44-4306-8f00-dd476622625e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.135324] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1440.135324] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17d2a2c6-bb63-4113-b13d-1bdcd87b02c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.142352] env[62820]: DEBUG oslo_vmware.api [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1440.142352] env[62820]: value = "task-1695450" [ 1440.142352] env[62820]: _type = "Task" [ 1440.142352] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.154215] env[62820]: DEBUG oslo_vmware.api [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695450, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.155323] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.570527] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f16c670-6d3d-4a24-aa76-a9161e6d2b9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.578978] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf2629d-3af2-479f-8fed-690854cc16a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.610327] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8498565-97a1-4587-9c8a-238e3a7954b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.619424] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a60bdb-7242-4bbf-92f6-fdca9e7b36fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.625025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "9910a0ea-5ce0-41e9-b449-da729a4c3223" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.625282] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.625478] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "9910a0ea-5ce0-41e9-b449-da729a4c3223-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.625853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.625853] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.628221] env[62820]: INFO nova.compute.manager [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Terminating instance [ 1440.640875] env[62820]: DEBUG nova.compute.provider_tree [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1440.645851] env[62820]: INFO nova.compute.manager [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Took 40.63 seconds to build instance. [ 1440.656272] env[62820]: DEBUG oslo_vmware.api [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695450, 'name': PowerOffVM_Task, 'duration_secs': 0.448236} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.656655] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.656719] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1440.656971] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ed1a75d-7ead-4075-807e-e893eaf483d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.913893] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1440.914253] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1440.914452] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c120efc-8aad-4614-bc30-76f0bc08214c 
tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Deleting the datastore file [datastore1] 262d0714-d7d7-443c-9927-ef03ba9f230e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1440.914735] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08219c46-35eb-4a3d-9e31-a7ccb4b5d3e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.926633] env[62820]: DEBUG oslo_vmware.api [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for the task: (returnval){ [ 1440.926633] env[62820]: value = "task-1695453" [ 1440.926633] env[62820]: _type = "Task" [ 1440.926633] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.938865] env[62820]: DEBUG oslo_vmware.api [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.150175] env[62820]: DEBUG nova.scheduler.client.report [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1441.151555] env[62820]: DEBUG nova.compute.manager [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1441.151943] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1441.152473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e1cc5fcd-2866-4e72-8949-979c23c57934 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.967s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.154327] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e40e1c-a067-484c-90f6-a13a94c5e25d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.163187] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1441.163454] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6bb0ac9-2860-495c-8ba9-8f6144c6c442 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.170501] env[62820]: DEBUG oslo_vmware.api [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1441.170501] env[62820]: value = "task-1695454" [ 1441.170501] env[62820]: _type = "Task" [ 1441.170501] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.183309] env[62820]: DEBUG oslo_vmware.api [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695454, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.351332] env[62820]: INFO nova.compute.manager [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Rebuilding instance [ 1441.421762] env[62820]: DEBUG nova.compute.manager [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1441.422853] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593302e1-93bc-4ea8-bcc2-215178bee33d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.440349] env[62820]: DEBUG oslo_vmware.api [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Task: {'id': task-1695453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429534} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.441027] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1441.441027] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1441.441152] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1441.442573] env[62820]: INFO nova.compute.manager [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1441.442573] env[62820]: DEBUG oslo.service.loopingcall [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.442573] env[62820]: DEBUG nova.compute.manager [-] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1441.442573] env[62820]: DEBUG nova.network.neutron [-] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1441.654660] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.907s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.655299] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1441.658327] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.711s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.658556] env[62820]: DEBUG nova.objects.instance [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1441.691257] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe0c240-3da7-40f1-90cf-abe9d696218f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.706611] env[62820]: DEBUG oslo_vmware.api [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695454, 'name': PowerOffVM_Task, 'duration_secs': 0.205981} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.709121] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1441.709284] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1441.709598] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22fe2b51-005c-46d5-99e3-29bc7bcc3715 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.714426] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Suspending the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1441.714426] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9c222f23-7af0-4115-9860-74fdf5db18fe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.724732] env[62820]: DEBUG oslo_vmware.api [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1441.724732] env[62820]: value = "task-1695456" [ 1441.724732] env[62820]: _type = "Task" [ 1441.724732] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.735420] env[62820]: DEBUG oslo_vmware.api [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695456, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.818190] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1441.818190] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1441.818190] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Deleting the datastore file [datastore1] 9910a0ea-5ce0-41e9-b449-da729a4c3223 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1441.818190] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6159e13d-57cc-43c9-ab9d-ed86ed765259 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.828236] env[62820]: DEBUG oslo_vmware.api [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for the task: (returnval){ [ 1441.828236] env[62820]: value = "task-1695457" [ 1441.828236] env[62820]: _type = "Task" [ 1441.828236] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.840013] env[62820]: DEBUG oslo_vmware.api [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.164767] env[62820]: DEBUG nova.compute.utils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1442.171246] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1442.172517] env[62820]: DEBUG nova.network.neutron [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1442.214124] env[62820]: DEBUG nova.compute.manager [req-eb39ea2c-b21a-47db-af21-18f9a47ef92b req-9697e7ad-121d-46f7-8f37-274cc5130787 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received event network-vif-deleted-1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1442.214124] env[62820]: INFO nova.compute.manager [req-eb39ea2c-b21a-47db-af21-18f9a47ef92b req-9697e7ad-121d-46f7-8f37-274cc5130787 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Neutron deleted interface 1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f; detaching it from the instance and deleting it from the info cache [ 1442.214124] env[62820]: DEBUG nova.network.neutron [req-eb39ea2c-b21a-47db-af21-18f9a47ef92b req-9697e7ad-121d-46f7-8f37-274cc5130787 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Updating instance_info_cache with network_info: [{"id": "0b1a477f-8c32-4c99-892f-23f8332338e2", "address": "fa:16:3e:19:8c:b7", "network": {"id": "fa8ad356-4923-4df8-91fb-4a94668b2e08", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-157646262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a069009bc6a741379effea7b50d9e1c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b1a477f-8c", "ovs_interfaceid": "0b1a477f-8c32-4c99-892f-23f8332338e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.238624] env[62820]: DEBUG oslo_vmware.api [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695456, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.240454] env[62820]: DEBUG nova.policy [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7a0e8f276074325b78193cb7a2a3a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1730db17199844cd8833f1176d249b0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1442.349818] env[62820]: DEBUG oslo_vmware.api [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Task: {'id': task-1695457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435012} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.350459] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1442.350621] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1442.351119] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1442.351293] env[62820]: INFO nova.compute.manager [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1442.351634] env[62820]: DEBUG oslo.service.loopingcall [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1442.351955] env[62820]: DEBUG nova.compute.manager [-] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1442.352137] env[62820]: DEBUG nova.network.neutron [-] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1442.440223] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1442.440424] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-174dff58-2708-4ed2-9e5a-24d138075779 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.449258] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1442.449258] env[62820]: value = "task-1695458" [ 1442.449258] env[62820]: _type = "Task" [ 1442.449258] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.458466] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.624194] env[62820]: DEBUG nova.network.neutron [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Successfully created port: ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1442.674697] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1442.679045] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a664da28-01db-4a29-aa22-4fc08e371808 tempest-ServersAdmin275Test-300588507 tempest-ServersAdmin275Test-300588507-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.682922] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.809s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.682922] env[62820]: DEBUG nova.objects.instance [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lazy-loading 'resources' on Instance uuid 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1442.718961] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3fb5366-00e9-40f9-be35-c923f5ad3c56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.739611] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e483701-c989-43d3-9347-ed2385188fb3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.759275] env[62820]: DEBUG oslo_vmware.api [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695456, 'name': SuspendVM_Task, 'duration_secs': 0.695588} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.764387] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Suspended the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1442.764387] env[62820]: DEBUG nova.compute.manager [None req-6de8b8f5-fec8-4339-8c2e-686b2b6ce300 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1442.764387] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8c3487-7d72-4019-88eb-eff5a2fc1b54 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.788626] env[62820]: DEBUG nova.compute.manager [req-eb39ea2c-b21a-47db-af21-18f9a47ef92b req-9697e7ad-121d-46f7-8f37-274cc5130787 service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Detach interface failed, port_id=1a88c41e-8fa1-4b06-a7cb-5c36d2d04f6f, reason: Instance 262d0714-d7d7-443c-9927-ef03ba9f230e could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1442.960490] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695458, 'name': PowerOffVM_Task, 'duration_secs': 0.391524} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.960827] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1442.961164] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1442.962326] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d88e705-f4fc-47a7-af75-db0b725ff504 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.971741] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1442.972095] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4da6c1d-8426-4061-a532-e63e222cae96 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.052770] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1443.053086] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1443.053309] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleting the datastore file [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1443.053654] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-286d0ee7-cdc5-46bd-9e94-a7c45aeec6ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.062497] env[62820]: DEBUG oslo_vmware.api [None 
req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1443.062497] env[62820]: value = "task-1695461" [ 1443.062497] env[62820]: _type = "Task" [ 1443.062497] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.072202] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695461, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.264061] env[62820]: DEBUG nova.network.neutron [-] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.578318] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171976} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.578318] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.578318] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1443.578318] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1443.632240] env[62820]: DEBUG nova.network.neutron [-] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.694308] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1443.735066] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1443.735341] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1443.735500] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1443.735684] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1443.735828] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1443.736034] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1443.736286] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1443.736994] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1443.736994] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1443.736994] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1443.736994] env[62820]: DEBUG nova.virt.hardware [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1443.738040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821c947c-1e2f-4af8-99e2-7fd7e2b880ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.743245] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0022520-4e69-4852-b4cb-6df53f979c13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.749700] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8994ba2c-227b-413f-abc1-75be2b2f5beb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.756863] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45aead5c-c865-444b-b3d3-2635c64b6274 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.769130] env[62820]: INFO nova.compute.manager [-] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Took 2.33 seconds to deallocate network for instance. [ 1443.810843] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3ae840-575e-488f-b921-e457d0496cf4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.822060] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e817f51b-2186-4573-88ad-ae2f531dbe66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.843281] env[62820]: DEBUG nova.compute.provider_tree [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.138020] env[62820]: INFO nova.compute.manager [-] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Took 1.79 seconds to deallocate network for instance. 
[ 1444.279974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.348219] env[62820]: DEBUG nova.scheduler.client.report [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1444.424045] env[62820]: DEBUG nova.network.neutron [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Successfully updated port: ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.501410] env[62820]: DEBUG nova.compute.manager [req-ef3fdb8c-4e30-448d-b504-dd85ea1fc6dd req-2f76f1e9-0197-4023-a074-dc0fec9701ca service nova] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Received event network-vif-deleted-0b1a477f-8c32-4c99-892f-23f8332338e2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1444.501611] env[62820]: DEBUG nova.compute.manager [req-ef3fdb8c-4e30-448d-b504-dd85ea1fc6dd req-2f76f1e9-0197-4023-a074-dc0fec9701ca service nova] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Received event network-vif-deleted-f8810d7c-99fa-4aca-b414-846eebdcd345 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1444.532855] env[62820]: DEBUG nova.compute.manager [req-676418cf-f0c9-4ad5-9011-5cfee9313b7b req-771b2ddc-5a56-4c56-aebc-48aed5d1e4f7 service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Received event network-vif-plugged-ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1444.533178] env[62820]: DEBUG oslo_concurrency.lockutils [req-676418cf-f0c9-4ad5-9011-5cfee9313b7b req-771b2ddc-5a56-4c56-aebc-48aed5d1e4f7 service nova] Acquiring lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.533397] env[62820]: DEBUG oslo_concurrency.lockutils [req-676418cf-f0c9-4ad5-9011-5cfee9313b7b req-771b2ddc-5a56-4c56-aebc-48aed5d1e4f7 service nova] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.534067] env[62820]: DEBUG oslo_concurrency.lockutils [req-676418cf-f0c9-4ad5-9011-5cfee9313b7b 
req-771b2ddc-5a56-4c56-aebc-48aed5d1e4f7 service nova] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.534191] env[62820]: DEBUG nova.compute.manager [req-676418cf-f0c9-4ad5-9011-5cfee9313b7b req-771b2ddc-5a56-4c56-aebc-48aed5d1e4f7 service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] No waiting events found dispatching network-vif-plugged-ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1444.534357] env[62820]: WARNING nova.compute.manager [req-676418cf-f0c9-4ad5-9011-5cfee9313b7b req-771b2ddc-5a56-4c56-aebc-48aed5d1e4f7 service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Received unexpected event network-vif-plugged-ac0c5d29-710c-41c1-9d17-a8a15f59cb82 for instance with vm_state building and task_state spawning. [ 1444.618199] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.619067] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.619067] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.619234] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.621244] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.621244] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.621244] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.621505] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.621789] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.622102] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.624019] env[62820]: DEBUG nova.virt.hardware [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.624019] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a5473d-2b0d-43c0-84d2-b57c337b0c05 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.634098] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9f28d2-0184-44ec-b98c-e168871f0344 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.656161] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.656635] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:4b:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f1b810c-dc19-4971-a532-bdac241941cf', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1444.664466] env[62820]: DEBUG oslo.service.loopingcall 
[None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1444.665246] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1444.666590] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e88edf1a-10f9-46d8-aff7-8054e2a02bef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.690122] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1444.690122] env[62820]: value = "task-1695462" [ 1444.690122] env[62820]: _type = "Task" [ 1444.690122] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.699758] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695462, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.855146] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.859610] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.109s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.860878] env[62820]: INFO nova.compute.claims [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1444.890976] env[62820]: INFO nova.scheduler.client.report [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Deleted allocations for instance 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1 [ 1444.927682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "refresh_cache-e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.927835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "refresh_cache-e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.928555] env[62820]: DEBUG nova.network.neutron [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1445.202848] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695462, 'name': CreateVM_Task, 'duration_secs': 0.508389} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.203621] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1445.204100] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.205044] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.205044] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1445.205044] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37074582-e0a6-4e39-959c-ba603878f767 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.211092] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1445.211092] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a7912e-6c93-e6ea-d8bc-9fec651b2d24" [ 1445.211092] env[62820]: _type = "Task" [ 1445.211092] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.220540] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a7912e-6c93-e6ea-d8bc-9fec651b2d24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.403910] env[62820]: DEBUG oslo_concurrency.lockutils [None req-850ca1ba-740d-40c8-91f3-67ff968c2757 tempest-ServersAaction247Test-1754691789 tempest-ServersAaction247Test-1754691789-project-member] Lock "4ab0bb5c-259d-4419-9c7d-ed3086efdcb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.815s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.477661] env[62820]: DEBUG nova.compute.manager [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1445.478700] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2284e756-865e-44b5-aacd-272774f7113a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.482807] env[62820]: DEBUG nova.network.neutron [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1445.692533] env[62820]: DEBUG nova.network.neutron [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Updating instance_info_cache with network_info: [{"id": "ac0c5d29-710c-41c1-9d17-a8a15f59cb82", "address": "fa:16:3e:96:83:f9", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac0c5d29-71", "ovs_interfaceid": "ac0c5d29-710c-41c1-9d17-a8a15f59cb82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.729755] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a7912e-6c93-e6ea-d8bc-9fec651b2d24, 'name': SearchDatastore_Task, 'duration_secs': 0.011216} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.730106] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.730368] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1445.730599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.730830] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.731114] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1445.731249] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42cb041e-2526-484a-844b-b79c65f5fb3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.742797] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1445.743020] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1445.743807] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21c28fe6-badb-477f-903f-be6784f7b564 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.757280] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1445.757280] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522f5155-aec1-9ba7-d0a1-309949846ccc" [ 1445.757280] env[62820]: _type = "Task" [ 1445.757280] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.767072] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522f5155-aec1-9ba7-d0a1-309949846ccc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.000387] env[62820]: INFO nova.compute.manager [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] instance snapshotting [ 1446.000664] env[62820]: WARNING nova.compute.manager [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1446.006957] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf02285-8054-428a-ad3c-e4a546968c22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.035802] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4525e7b-9cf8-4498-8656-6058199e218f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.198823] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "refresh_cache-e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.199187] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Instance network_info: |[{"id": "ac0c5d29-710c-41c1-9d17-a8a15f59cb82", "address": "fa:16:3e:96:83:f9", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac0c5d29-71", "ovs_interfaceid": "ac0c5d29-710c-41c1-9d17-a8a15f59cb82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1446.199616] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:83:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac0c5d29-710c-41c1-9d17-a8a15f59cb82', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1446.209811] env[62820]: DEBUG oslo.service.loopingcall [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1446.213668] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1446.213668] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87a12530-3719-4a96-8744-098fcc2dd37a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.236352] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1446.236352] env[62820]: value = "task-1695463" [ 1446.236352] env[62820]: _type = "Task" [ 1446.236352] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.251099] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695463, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.270136] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522f5155-aec1-9ba7-d0a1-309949846ccc, 'name': SearchDatastore_Task, 'duration_secs': 0.011406} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.273834] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d311dcd-3ec5-40be-8275-54da1c338d87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.280424] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1446.280424] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a4b9d9-d808-0a9a-0e47-8537afcfc4ac" [ 1446.280424] env[62820]: _type = "Task" [ 1446.280424] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.290710] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a4b9d9-d808-0a9a-0e47-8537afcfc4ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.394526] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.394676] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.411130] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "56c371a9-983f-4d5f-8abf-0183736c374c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.411891] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "56c371a9-983f-4d5f-8abf-0183736c374c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.415292] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c8a5b6-501f-4639-a1cc-95b67aef6651 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.424495] env[62820]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19dc631-9c20-4694-9f3d-092ac12ad606 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.457938] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49705536-cfd7-47a0-a8f1-7ee6880eb6b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.474648] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655e842b-ef0b-4454-99bf-ed046a48479a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.494336] env[62820]: DEBUG nova.compute.provider_tree [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.549651] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1446.550602] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5a5548be-da7d-4ff0-a42c-f7dfc6785b04 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.562246] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1446.562246] env[62820]: value = "task-1695464" [ 1446.562246] env[62820]: _type = "Task" [ 1446.562246] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.575183] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695464, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.578335] env[62820]: DEBUG nova.compute.manager [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Received event network-changed-ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1446.578861] env[62820]: DEBUG nova.compute.manager [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Refreshing instance network info cache due to event network-changed-ac0c5d29-710c-41c1-9d17-a8a15f59cb82. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1446.579104] env[62820]: DEBUG oslo_concurrency.lockutils [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] Acquiring lock "refresh_cache-e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.579254] env[62820]: DEBUG oslo_concurrency.lockutils [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] Acquired lock "refresh_cache-e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.579479] env[62820]: DEBUG nova.network.neutron [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Refreshing network info cache for port ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1446.753999] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695463, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.793242] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a4b9d9-d808-0a9a-0e47-8537afcfc4ac, 'name': SearchDatastore_Task, 'duration_secs': 0.010875} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.793577] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.793887] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1446.797344] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f6f8c14-9e03-4452-b93c-d4e628cfb055 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.802012] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1446.802012] env[62820]: value = "task-1695465" [ 1446.802012] env[62820]: _type = "Task" [ 1446.802012] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.811614] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.899749] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1446.919915] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1446.999235] env[62820]: DEBUG nova.scheduler.client.report [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1447.078285] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695464, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.250216] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695463, 'name': CreateVM_Task, 'duration_secs': 0.813404} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.250473] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1447.251275] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.251529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.251953] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1447.252509] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc29449d-97cc-4d0b-b401-14a460477e9c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.261744] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1447.261744] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5210b0fe-3d59-b2a8-9068-0f30cffdc4d6" [ 1447.261744] env[62820]: _type = "Task" [ 1447.261744] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.276446] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5210b0fe-3d59-b2a8-9068-0f30cffdc4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.315264] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695465, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495487} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.315735] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1447.316052] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1447.316399] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54e7d423-4f7e-4a6b-90b7-076ba05bac5f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.326018] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1447.326018] env[62820]: value = "task-1695466" [ 1447.326018] env[62820]: _type = "Task" [ 1447.326018] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.336475] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695466, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.406491] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquiring lock "b7c9f518-c908-42cc-ba09-59b0f8431f68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.406802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.414275] env[62820]: DEBUG nova.network.neutron [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Updated VIF entry in instance network info cache for port ac0c5d29-710c-41c1-9d17-a8a15f59cb82. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1447.414699] env[62820]: DEBUG nova.network.neutron [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Updating instance_info_cache with network_info: [{"id": "ac0c5d29-710c-41c1-9d17-a8a15f59cb82", "address": "fa:16:3e:96:83:f9", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac0c5d29-71", "ovs_interfaceid": "ac0c5d29-710c-41c1-9d17-a8a15f59cb82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.434890] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.448431] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.507822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.508437] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1447.511546] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.881s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.511795] env[62820]: DEBUG nova.objects.instance [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lazy-loading 'resources' on Instance uuid 766dd26e-3866-4ef3-bd87-b81e5f6bc718 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1447.576408] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695464, 'name': CreateSnapshot_Task, 'duration_secs': 0.935987} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.576691] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1447.577556] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b68084b-3317-4766-bf1b-c8a49e5be760 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.773433] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5210b0fe-3d59-b2a8-9068-0f30cffdc4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.024721} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.773826] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.774090] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1447.774711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.774711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.774711] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1447.775016] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7373d1ce-9da6-4abb-9084-44d1d9702dc4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.784659] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1447.784841] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1447.785607] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5232887-3c23-474b-b0fa-a8aaeeac8100 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.791985] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1447.791985] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52058bac-bf3f-7fad-0e65-f1433c8556a4" [ 1447.791985] env[62820]: _type = "Task" [ 1447.791985] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.801112] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52058bac-bf3f-7fad-0e65-f1433c8556a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.835854] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10502} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.836530] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1447.836998] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d72c66c-fca4-46cb-b810-c301b77dbe37 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.860023] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1447.860363] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df5967aa-8f60-4953-9658-ddc5787fa331 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.881995] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1447.881995] env[62820]: value = "task-1695467" [ 1447.881995] env[62820]: _type = "Task" [ 1447.881995] env[62820]: } to 
complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.892944] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695467, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.911756] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1447.921027] env[62820]: DEBUG oslo_concurrency.lockutils [req-80d1851c-ed20-45cd-8bbf-8412c7578cc5 req-d7c1bf68-3cbb-4bd2-91ba-e3b2abb8ba3c service nova] Releasing lock "refresh_cache-e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.017544] env[62820]: DEBUG nova.compute.utils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1448.022726] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1448.022925] env[62820]: DEBUG nova.network.neutron [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1448.096972] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1448.101408] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b5c91356-208f-41e8-9dee-746d5bed5d64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.107108] env[62820]: DEBUG nova.policy [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98cc980d40954ba8a89151d205b55cc7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a76c7d2d95714ac2ab4a2016b2516d7b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) 
authorize /opt/stack/nova/nova/policy.py:192}} [ 1448.116396] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1448.116396] env[62820]: value = "task-1695468" [ 1448.116396] env[62820]: _type = "Task" [ 1448.116396] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.127207] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.309925] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52058bac-bf3f-7fad-0e65-f1433c8556a4, 'name': SearchDatastore_Task, 'duration_secs': 0.00986} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.316187] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e13ee611-6878-4b4f-ab83-6a7ea7a21310 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.325455] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1448.325455] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520c3d10-17bd-8794-00c6-53b49dde51ba" [ 1448.325455] env[62820]: _type = "Task" [ 1448.325455] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.351585] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520c3d10-17bd-8794-00c6-53b49dde51ba, 'name': SearchDatastore_Task, 'duration_secs': 0.014825} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.358348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.358816] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] e45cdcfb-f2ce-4798-8e97-1c3f95e61db3/e45cdcfb-f2ce-4798-8e97-1c3f95e61db3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1448.363574] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-955efb67-3ec1-4642-86b5-475b2834856a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.378793] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1448.378793] env[62820]: value = "task-1695469" [ 1448.378793] env[62820]: _type = "Task" [ 1448.378793] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.395195] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.396367] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695467, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.452779] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.488213] env[62820]: DEBUG nova.network.neutron [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Successfully created port: c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1448.523418] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1448.587359] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4b9b7a-4b79-49f9-9e94-7b849764d19e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.598266] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1ebe1b-ad62-4d1a-aca5-92305ff42938 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.637877] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f296f0-b5c8-4305-9e5d-4717c23456c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.649639] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab69d1ab-3f15-48c8-876e-9a4c29afb8d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.654122] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.666650] env[62820]: DEBUG nova.compute.provider_tree [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1448.896135] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695469, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.899602] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695467, 'name': ReconfigVM_Task, 'duration_secs': 0.658642} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.899959] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1448.900821] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bd10741-2856-4ce1-9475-cd079260d630 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.910661] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1448.910661] env[62820]: value = "task-1695470" [ 1448.910661] env[62820]: _type = "Task" [ 1448.910661] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.922159] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695470, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.153634] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.170447] env[62820]: DEBUG nova.scheduler.client.report [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1449.392811] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.889583} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.393355] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] e45cdcfb-f2ce-4798-8e97-1c3f95e61db3/e45cdcfb-f2ce-4798-8e97-1c3f95e61db3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1449.393821] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1449.393994] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78537f85-0775-449f-abfa-5557eb05a911 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.402662] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1449.402662] env[62820]: value = "task-1695471" [ 1449.402662] env[62820]: _type = "Task" [ 1449.402662] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.418229] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695471, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.424920] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695470, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.536367] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1449.560602] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1449.561346] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1449.561346] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.561346] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1449.561508] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.561590] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1449.561792] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1449.561961] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1449.562146] env[62820]: DEBUG 
nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1449.563265] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1449.563265] env[62820]: DEBUG nova.virt.hardware [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1449.563364] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de813fd-46d0-4a8f-bc8f-107d26a012a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.572694] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f20b6a1-071f-48c3-abb2-0caf2da38757 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.635659] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "519c961c-557e-4796-88da-047c55d6be44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.635943] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "519c961c-557e-4796-88da-047c55d6be44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.636204] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "519c961c-557e-4796-88da-047c55d6be44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.636394] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "519c961c-557e-4796-88da-047c55d6be44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.638256] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 
tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "519c961c-557e-4796-88da-047c55d6be44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.643432] env[62820]: INFO nova.compute.manager [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Terminating instance [ 1449.653119] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.677074] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.681556] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.395s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.681909] env[62820]: DEBUG nova.objects.instance [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lazy-loading 'resources' on Instance uuid d040f935-566b-4bbe-b9f6-379fd1dc1a91 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1449.711735] env[62820]: INFO nova.scheduler.client.report [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Deleted allocations for instance 766dd26e-3866-4ef3-bd87-b81e5f6bc718 [ 1449.919134] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695471, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.926214] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695470, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.033756] env[62820]: DEBUG nova.compute.manager [req-5109de5c-fdd6-4dc8-85f7-71cbb407a5e0 req-cbe86724-5e3e-4ea2-823b-56fe46e8f149 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Received event network-vif-plugged-c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1450.033850] env[62820]: DEBUG oslo_concurrency.lockutils [req-5109de5c-fdd6-4dc8-85f7-71cbb407a5e0 req-cbe86724-5e3e-4ea2-823b-56fe46e8f149 service nova] Acquiring lock "1926c780-faea-40d8-a00b-6ad576349a68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.034113] env[62820]: DEBUG oslo_concurrency.lockutils [req-5109de5c-fdd6-4dc8-85f7-71cbb407a5e0 req-cbe86724-5e3e-4ea2-823b-56fe46e8f149 service nova] Lock "1926c780-faea-40d8-a00b-6ad576349a68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.034403] env[62820]: DEBUG oslo_concurrency.lockutils [req-5109de5c-fdd6-4dc8-85f7-71cbb407a5e0 req-cbe86724-5e3e-4ea2-823b-56fe46e8f149 service nova] Lock "1926c780-faea-40d8-a00b-6ad576349a68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.034501] env[62820]: DEBUG nova.compute.manager [req-5109de5c-fdd6-4dc8-85f7-71cbb407a5e0 req-cbe86724-5e3e-4ea2-823b-56fe46e8f149 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] No waiting events found dispatching network-vif-plugged-c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1450.034642] env[62820]: WARNING nova.compute.manager [req-5109de5c-fdd6-4dc8-85f7-71cbb407a5e0 req-cbe86724-5e3e-4ea2-823b-56fe46e8f149 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Received unexpected event network-vif-plugged-c46ea4ef-6d34-4889-b119-49077f2482b7 for instance with vm_state building and task_state spawning. [ 1450.149823] env[62820]: DEBUG nova.compute.manager [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1450.149823] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1450.158326] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bbfecd-2d2f-4b02-b125-7ef268b2f4a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.164881] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.171401] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1450.172307] env[62820]: DEBUG nova.network.neutron [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Successfully updated port: c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1450.177984] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c88e0f00-8d71-4d78-87ac-1df1da6f78a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.190507] env[62820]: DEBUG oslo_vmware.api [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1450.190507] env[62820]: value = "task-1695472" [ 1450.190507] env[62820]: _type = "Task" [ 1450.190507] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.204532] env[62820]: DEBUG oslo_vmware.api [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695472, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.225771] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2490037f-7469-4530-a696-45d754a79db7 tempest-ServersAdmin275Test-1421829862 tempest-ServersAdmin275Test-1421829862-project-member] Lock "766dd26e-3866-4ef3-bd87-b81e5f6bc718" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.875s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.425020] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695471, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.832569} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.425352] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1450.426781] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437efbda-86f2-43b8-8f30-3d0fafe1008f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.432818] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695470, 'name': Rename_Task, 'duration_secs': 1.18387} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.433575] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1450.433844] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-903463d1-7c81-4776-9ef5-1e6b57537662 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.456200] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] e45cdcfb-f2ce-4798-8e97-1c3f95e61db3/e45cdcfb-f2ce-4798-8e97-1c3f95e61db3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1450.460778] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fde1d1c-241a-4ca8-8670-02a944220f17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.477109] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1450.477109] env[62820]: value = "task-1695473" [ 1450.477109] env[62820]: _type = "Task" [ 1450.477109] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.489790] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1450.489790] env[62820]: value = "task-1695474" [ 1450.489790] env[62820]: _type = "Task" [ 1450.489790] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.490266] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695473, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.506857] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695474, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.648584] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.681948] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "refresh_cache-1926c780-faea-40d8-a00b-6ad576349a68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.682296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquired lock "refresh_cache-1926c780-faea-40d8-a00b-6ad576349a68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.682386] env[62820]: DEBUG nova.network.neutron [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1450.712200] env[62820]: DEBUG oslo_vmware.api [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695472, 'name': PowerOffVM_Task, 'duration_secs': 0.410737} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.718317] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1450.718317] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1450.718317] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a99415ff-68fb-42b0-a825-29767384d275 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.819962] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee75f82-2ba2-4755-94e7-8e6ad4065d5f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.831540] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ffaf0b-9397-4d1c-a069-ef4d95ed2b6e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.868071] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2723667-8f3e-4aae-a18f-b2ebb3bc8379 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.882289] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-bce5a8ae-7a21-4ddc-a020-58a1430adf97 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.895168] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1450.895700] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1450.896176] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Deleting the datastore file [datastore1] 519c961c-557e-4796-88da-047c55d6be44 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1450.911079] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d31e01c4-53b7-4a67-b6d3-9610845ab566 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.917418] env[62820]: DEBUG nova.compute.provider_tree [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.927868] env[62820]: DEBUG oslo_vmware.api [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for the task: (returnval){ [ 1450.927868] env[62820]: value = "task-1695476" [ 1450.927868] env[62820]: _type = "Task" [ 1450.927868] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.944190] env[62820]: DEBUG oslo_vmware.api [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.988519] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695473, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.004113] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695474, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.152968] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695468, 'name': CloneVM_Task, 'duration_secs': 2.703681} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.153656] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Created linked-clone VM from snapshot [ 1451.155159] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d60c5c5-1b5b-463d-b55c-a647c32281a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.166551] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Uploading image 28870360-f129-4d06-bd67-a3f35c895554 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1451.193285] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1451.193285] env[62820]: value = "vm-353511" [ 1451.193285] env[62820]: _type = "VirtualMachine" [ 1451.193285] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1451.194387] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4c57465d-a8ef-488d-8cef-5a8cf17d360a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.204378] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease: (returnval){ [ 1451.204378] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522fa75a-215a-176a-2fb7-ffc8a0b98961" [ 1451.204378] env[62820]: _type = "HttpNfcLease" [ 1451.204378] env[62820]: } obtained for exporting VM: (result){ [ 1451.204378] env[62820]: value = "vm-353511" [ 1451.204378] env[62820]: _type = "VirtualMachine" [ 1451.204378] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1451.204623] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the lease: (returnval){ [ 1451.204623] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522fa75a-215a-176a-2fb7-ffc8a0b98961" [ 1451.204623] env[62820]: _type = "HttpNfcLease" [ 1451.204623] env[62820]: } to be ready. 
{{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1451.212898] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1451.212898] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522fa75a-215a-176a-2fb7-ffc8a0b98961" [ 1451.212898] env[62820]: _type = "HttpNfcLease" [ 1451.212898] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1451.285200] env[62820]: DEBUG nova.network.neutron [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1451.421914] env[62820]: DEBUG nova.scheduler.client.report [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1451.444019] env[62820]: DEBUG oslo_vmware.api [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Task: {'id': task-1695476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229017} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.444693] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1451.445254] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1451.445595] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1451.445960] env[62820]: INFO nova.compute.manager [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] [instance: 519c961c-557e-4796-88da-047c55d6be44] Took 1.30 seconds to destroy the instance on the hypervisor. 
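Annotation (not part of the captured log): the destroy sequence recorded just above (PowerOffVM_Task, UnregisterVM, FileManager.DeleteDatastoreFile_Task, each followed by task polling) is driven through oslo.vmware's session API. The sketch below is a minimal, simplified illustration of that pattern; the host, credentials, vm_ref and datastore path are placeholders, and Nova's real vmops/ds_util code adds datacenter lookup, error handling and instance bookkeeping that is omitted here.

    # Hedged sketch of the power-off / unregister / delete-file sequence.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def destroy_vm(session, vm_ref, ds_path):
        # Power off, then poll the returned vCenter task until it completes,
        # mirroring the "PowerOffVM_Task ... completed successfully" entries.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # Remove the VM from the vCenter inventory ("Unregistering the VM").
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the leftover files from the datastore
        # ("Deleting the datastore file [datastore1] <uuid>").
        content = session.vim.service_content
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  content.fileManager, name=ds_path)
        session.wait_for_task(task)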
[ 1451.446719] env[62820]: DEBUG oslo.service.loopingcall [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.447097] env[62820]: DEBUG nova.compute.manager [-] [instance: 519c961c-557e-4796-88da-047c55d6be44] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1451.447416] env[62820]: DEBUG nova.network.neutron [-] [instance: 519c961c-557e-4796-88da-047c55d6be44] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1451.488063] env[62820]: DEBUG oslo_vmware.api [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695473, 'name': PowerOnVM_Task, 'duration_secs': 0.854801} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.488411] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1451.488656] env[62820]: DEBUG nova.compute.manager [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1451.489636] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce4f19b-6185-4f0e-bc48-0ad94e7a0b81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.507290] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695474, 'name': ReconfigVM_Task, 'duration_secs': 0.718931} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.507623] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Reconfigured VM instance instance-00000028 to attach disk [datastore1] e45cdcfb-f2ce-4798-8e97-1c3f95e61db3/e45cdcfb-f2ce-4798-8e97-1c3f95e61db3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1451.508294] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffcbea4a-7bd3-4716-835b-d969e2dd6cb8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.517522] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1451.517522] env[62820]: value = "task-1695478" [ 1451.517522] env[62820]: _type = "Task" [ 1451.517522] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.527897] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695478, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.589027] env[62820]: DEBUG nova.network.neutron [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Updating instance_info_cache with network_info: [{"id": "c46ea4ef-6d34-4889-b119-49077f2482b7", "address": "fa:16:3e:0b:c8:25", "network": {"id": "84f4c182-297f-48e4-b73d-ee04c7189bec", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1733904964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a76c7d2d95714ac2ab4a2016b2516d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc46ea4ef-6d", "ovs_interfaceid": "c46ea4ef-6d34-4889-b119-49077f2482b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.714231] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1451.714231] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522fa75a-215a-176a-2fb7-ffc8a0b98961" [ 1451.714231] 
env[62820]: _type = "HttpNfcLease" [ 1451.714231] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1451.714231] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1451.714231] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522fa75a-215a-176a-2fb7-ffc8a0b98961" [ 1451.714231] env[62820]: _type = "HttpNfcLease" [ 1451.714231] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1451.715009] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8dece59-79cc-49f3-bed2-015552e7bd53 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.723919] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52036bee-80d8-ad7e-c091-9846e26a6d6e/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1451.723919] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52036bee-80d8-ad7e-c091-9846e26a6d6e/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1451.787165] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.790882] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.790882] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.790882] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.790882] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.790882] env[62820]: INFO nova.compute.manager [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Terminating instance [ 1451.851614] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-add9aa2a-9c05-484a-b395-dc29fb752ec3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.928283] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.246s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.931182] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.976s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.932537] env[62820]: INFO nova.compute.claims [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1451.957439] env[62820]: INFO nova.scheduler.client.report [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Deleted allocations for instance d040f935-566b-4bbe-b9f6-379fd1dc1a91 [ 1452.011877] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.030790] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695478, 'name': Rename_Task, 'duration_secs': 0.250878} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.031232] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1452.031336] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc945f25-ea2e-43c5-a5a6-53bd4be0b4f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.039787] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1452.039787] env[62820]: value = "task-1695479" [ 1452.039787] env[62820]: _type = "Task" [ 1452.039787] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.049364] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695479, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.092630] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Releasing lock "refresh_cache-1926c780-faea-40d8-a00b-6ad576349a68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.092977] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Instance network_info: |[{"id": "c46ea4ef-6d34-4889-b119-49077f2482b7", "address": "fa:16:3e:0b:c8:25", "network": {"id": "84f4c182-297f-48e4-b73d-ee04c7189bec", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1733904964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a76c7d2d95714ac2ab4a2016b2516d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc46ea4ef-6d", "ovs_interfaceid": "c46ea4ef-6d34-4889-b119-49077f2482b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1452.093456] env[62820]: 
DEBUG nova.virt.vmwareapi.vmops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:c8:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c46ea4ef-6d34-4889-b119-49077f2482b7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1452.101727] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Creating folder: Project (a76c7d2d95714ac2ab4a2016b2516d7b). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1452.102242] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66a8d67c-15ee-4784-9575-11c1bac7232a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.117423] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Created folder: Project (a76c7d2d95714ac2ab4a2016b2516d7b) in parent group-v353379. [ 1452.117664] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Creating folder: Instances. Parent ref: group-v353512. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1452.117965] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7006781-6ec2-4061-881e-279a6d4c7a46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.132285] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Created folder: Instances in parent group-v353512. [ 1452.132285] env[62820]: DEBUG oslo.service.loopingcall [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1452.132285] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1452.132285] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dafea5c-6377-4bef-afc8-2c806ff95135 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.161845] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1452.161845] env[62820]: value = "task-1695482" [ 1452.161845] env[62820]: _type = "Task" [ 1452.161845] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.173021] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695482, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.286657] env[62820]: DEBUG nova.compute.manager [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Received event network-changed-c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1452.288114] env[62820]: DEBUG nova.compute.manager [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Refreshing instance network info cache due to event network-changed-c46ea4ef-6d34-4889-b119-49077f2482b7. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1452.288114] env[62820]: DEBUG oslo_concurrency.lockutils [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] Acquiring lock "refresh_cache-1926c780-faea-40d8-a00b-6ad576349a68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.288114] env[62820]: DEBUG oslo_concurrency.lockutils [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] Acquired lock "refresh_cache-1926c780-faea-40d8-a00b-6ad576349a68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.288114] env[62820]: DEBUG nova.network.neutron [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Refreshing network info cache for port c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1452.300775] env[62820]: DEBUG nova.compute.manager [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1452.300775] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1452.301952] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2174abb7-a6dd-4eed-9bac-b164a45efd93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.319396] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.320497] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8355397c-4c24-4276-aaf2-cebc5507592c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.329502] env[62820]: DEBUG oslo_vmware.api [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1452.329502] env[62820]: value = "task-1695483" [ 1452.329502] env[62820]: _type = "Task" [ 1452.329502] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.346749] env[62820]: DEBUG oslo_vmware.api [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695483, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.363839] env[62820]: DEBUG nova.compute.manager [req-9af905d4-e194-4a8a-9853-4d07600265d7 req-a67d7abc-6c24-45ac-ac8f-8176d9a24ffc service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Received event network-vif-deleted-29363f02-2acb-4e52-8db8-f9743ec7fb99 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1452.364350] env[62820]: INFO nova.compute.manager [req-9af905d4-e194-4a8a-9853-4d07600265d7 req-a67d7abc-6c24-45ac-ac8f-8176d9a24ffc service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Neutron deleted interface 29363f02-2acb-4e52-8db8-f9743ec7fb99; detaching it from the instance and deleting it from the info cache [ 1452.364858] env[62820]: DEBUG nova.network.neutron [req-9af905d4-e194-4a8a-9853-4d07600265d7 req-a67d7abc-6c24-45ac-ac8f-8176d9a24ffc service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.469141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fe1d56db-cc6e-4d01-9209-73193a4f30eb tempest-ServerDiagnosticsV248Test-159901226 tempest-ServerDiagnosticsV248Test-159901226-project-member] Lock "d040f935-566b-4bbe-b9f6-379fd1dc1a91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.963s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.552287] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695479, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.656515] env[62820]: DEBUG nova.network.neutron [-] [instance: 519c961c-557e-4796-88da-047c55d6be44] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.684431] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695482, 'name': CreateVM_Task, 'duration_secs': 0.45588} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.684857] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1452.687285] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.688022] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.688022] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1452.688184] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c8ad283-61f8-43c1-ad61-6b595248aa19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.697210] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1452.697210] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b1269d-eb1f-cc5a-98b8-8631f114c9ad" [ 1452.697210] env[62820]: _type = "Task" [ 1452.697210] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.708382] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b1269d-eb1f-cc5a-98b8-8631f114c9ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.846142] env[62820]: DEBUG oslo_vmware.api [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695483, 'name': PowerOffVM_Task, 'duration_secs': 0.273242} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.852734] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1452.852734] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1452.852734] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdd911dc-7b6f-43e2-9ba9-319e77ce8749 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.869475] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa33e9bf-a34b-49d7-bd50-cea3d58c59f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.884181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5c61fb-f956-4ce1-8e50-bc11c12c8f40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.929188] env[62820]: DEBUG nova.compute.manager [req-9af905d4-e194-4a8a-9853-4d07600265d7 req-a67d7abc-6c24-45ac-ac8f-8176d9a24ffc service nova] [instance: 519c961c-557e-4796-88da-047c55d6be44] Detach interface failed, port_id=29363f02-2acb-4e52-8db8-f9743ec7fb99, reason: Instance 519c961c-557e-4796-88da-047c55d6be44 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1452.978668] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1452.979283] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1452.979283] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Deleting the datastore file [datastore1] 0eb62424-0ee6-4ff4-94c2-bb6a10861759 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1452.979446] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8416a47-1459-405c-995c-bfba182edcf5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.988736] env[62820]: DEBUG oslo_vmware.api [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for the task: (returnval){ [ 1452.988736] env[62820]: value = "task-1695485" [ 1452.988736] env[62820]: _type = "Task" [ 1452.988736] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.001673] env[62820]: DEBUG oslo_vmware.api [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.060316] env[62820]: DEBUG oslo_vmware.api [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695479, 'name': PowerOnVM_Task, 'duration_secs': 0.612577} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.060963] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1453.061082] env[62820]: INFO nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Took 9.37 seconds to spawn the instance on the hypervisor. 
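Annotation: the 'Acquiring/Acquired/Releasing lock "[datastore1] devstack-image-cache_base/<image-id>"' entries above are produced by oslo.concurrency's named-lock helpers, which serialise concurrent spawns that want the same cached image. A hedged sketch of that pattern follows; check_cache() and fetch_image() are hypothetical stand-ins for Nova's actual image-cache handling.

    from oslo_concurrency import lockutils

    def ensure_cached_image(datastore, image_id):
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        # Entering and leaving the context manager produces DEBUG messages
        # similar to the "Acquiring lock ... / Releasing lock ..." lines above.
        with lockutils.lock(lock_name):
            if not check_cache(datastore, image_id):   # hypothetical helper
                fetch_image(datastore, image_id)       # hypothetical helper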
[ 1453.061298] env[62820]: DEBUG nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1453.062124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c6d1be-227d-4584-8d3a-8907a0484dff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.069057] env[62820]: DEBUG nova.objects.instance [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lazy-loading 'flavor' on Instance uuid 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1453.166544] env[62820]: INFO nova.compute.manager [-] [instance: 519c961c-557e-4796-88da-047c55d6be44] Took 1.72 seconds to deallocate network for instance. [ 1453.214828] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b1269d-eb1f-cc5a-98b8-8631f114c9ad, 'name': SearchDatastore_Task, 'duration_secs': 0.015328} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.222031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.222342] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1453.222563] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.222715] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.222935] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1453.223289] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed77624f-1bc5-4dc1-ba08-efaf8514fab7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.235748] env[62820]: DEBUG oslo_concurrency.lockutils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.236081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.241041] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1453.241267] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1453.241885] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8165a75c-be9d-4bc4-bd56-2e7af3f53282 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.255428] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1453.255428] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5272ae8b-12a5-4157-00c5-7a82cff37eb6" [ 1453.255428] env[62820]: _type = "Task" [ 1453.255428] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.266407] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5272ae8b-12a5-4157-00c5-7a82cff37eb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.360159] env[62820]: DEBUG nova.network.neutron [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Updated VIF entry in instance network info cache for port c46ea4ef-6d34-4889-b119-49077f2482b7. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1453.360566] env[62820]: DEBUG nova.network.neutron [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Updating instance_info_cache with network_info: [{"id": "c46ea4ef-6d34-4889-b119-49077f2482b7", "address": "fa:16:3e:0b:c8:25", "network": {"id": "84f4c182-297f-48e4-b73d-ee04c7189bec", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1733904964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a76c7d2d95714ac2ab4a2016b2516d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc46ea4ef-6d", "ovs_interfaceid": "c46ea4ef-6d34-4889-b119-49077f2482b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.501725] env[62820]: DEBUG oslo_vmware.api [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Task: {'id': task-1695485, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259433} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.502784] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1453.502784] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1453.502784] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1453.502784] env[62820]: INFO nova.compute.manager [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Took 1.20 seconds to destroy the instance on the hypervisor. 
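Annotation: the "Waiting for function ..._deallocate_network_with_retries to return" entries above come from oslo.service's looping-call helper, which re-invokes a function on a fixed interval until it signals completion. A minimal hedged sketch of that retry pattern is shown below; deallocate_for_instance() is a hypothetical stand-in, and Nova's real logic additionally caps the number of attempts before re-raising.

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate_for_instance):
        def _try():
            try:
                deallocate_for_instance()
            except Exception:
                # Returning normally lets the loop call us again after the
                # interval; raising LoopingCallDone stops the loop.
                return
            raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalLoopingCall(_try)
        timer.start(interval=30).wait()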
[ 1453.502784] env[62820]: DEBUG oslo.service.loopingcall [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.503306] env[62820]: DEBUG nova.compute.manager [-] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1453.503306] env[62820]: DEBUG nova.network.neutron [-] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1453.529308] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e00781-d097-4c6c-8fc8-034bc54c2c1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.538917] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ffd5ce-b070-426c-8cda-08a733ba9e95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.575767] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76525ac4-4bb6-46f6-ac45-56df198b1118 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.590343] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.590568] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.600305] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38acd625-2642-46c6-9232-1f6d128ce8d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.604595] env[62820]: INFO nova.compute.manager [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Took 41.14 seconds to build instance. 
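Annotation: the inventory payload that the scheduler report client logs for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a (here and earlier in the section) is a plain dict keyed by Placement resource class. It is reproduced below; the comparison function only illustrates why the log can report "Inventory has not changed": the provider is rewritten only when the reported dict differs from the cached one (the real ProviderTree comparison is per resource class and also tracks generations).

    inventory = {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    def needs_update(current, reported):
        # Simplified stand-in for the ProviderTree inventory check.
        return current != reported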
[ 1453.619191] env[62820]: DEBUG nova.compute.provider_tree [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.685234] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.739728] env[62820]: DEBUG nova.compute.utils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1453.773705] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5272ae8b-12a5-4157-00c5-7a82cff37eb6, 'name': SearchDatastore_Task, 'duration_secs': 0.014309} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.774921] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb10e77c-ea06-449c-a13a-5f01bbb4b4a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.789428] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1453.789428] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5259113d-82d1-c17e-32cb-30689d55d302" [ 1453.789428] env[62820]: _type = "Task" [ 1453.789428] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.799514] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5259113d-82d1-c17e-32cb-30689d55d302, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.868806] env[62820]: DEBUG oslo_concurrency.lockutils [req-60453441-2e4c-4e2a-98d4-07293317687c req-3fd3920a-2b54-4de1-a395-a793ea043ae2 service nova] Releasing lock "refresh_cache-1926c780-faea-40d8-a00b-6ad576349a68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.945783] env[62820]: INFO nova.compute.manager [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Rebuilding instance [ 1454.013116] env[62820]: DEBUG nova.compute.manager [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1454.014225] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2682775f-8df2-4b0b-b1a7-690a6833dcd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.108128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-55b2af84-83fe-45d1-8148-9cf56c0e9b70 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.659s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.122531] env[62820]: DEBUG nova.scheduler.client.report [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1454.244698] env[62820]: DEBUG oslo_concurrency.lockutils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.302792] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5259113d-82d1-c17e-32cb-30689d55d302, 'name': SearchDatastore_Task, 'duration_secs': 0.015089} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.303175] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.303538] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 1926c780-faea-40d8-a00b-6ad576349a68/1926c780-faea-40d8-a00b-6ad576349a68.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1454.303839] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17507752-3e40-46ad-9b1d-9dac57ad4be0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.316743] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1454.316743] env[62820]: value = "task-1695486" [ 1454.316743] env[62820]: _type = "Task" [ 1454.316743] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.326910] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.593943] env[62820]: DEBUG nova.network.neutron [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1454.629404] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.632114] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1454.635341] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.883s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.636825] env[62820]: DEBUG nova.objects.instance [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lazy-loading 'resources' on Instance uuid 2f917745-28ef-4dfe-8c09-45c15a80145d {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1454.842018] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695486, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.031509] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1455.031875] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11faa9f2-62c5-42f2-9192-33e812d132b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.041789] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1455.041789] env[62820]: value = "task-1695487" [ 1455.041789] env[62820]: _type = "Task" [ 1455.041789] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.052065] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695487, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.097947] env[62820]: DEBUG nova.network.neutron [-] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.138217] env[62820]: DEBUG nova.compute.utils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.142334] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1455.142660] env[62820]: DEBUG nova.network.neutron [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1455.253510] env[62820]: DEBUG nova.policy [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9220b3befd9641719c49a131cb86db41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f59ab047666940c6bcb633a221194395', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1455.334257] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695486, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.343853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.343853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.343853] env[62820]: INFO nova.compute.manager [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Attaching volume 02d0f274-09bd-42ba-8cf0-0c80226e94a5 to /dev/sdb [ 1455.388727] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af5a411-5df1-4d82-8d94-2095fdbc88ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.401321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc4abb5-37a6-4bbd-a28f-ee3e486b5f77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.413850] env[62820]: DEBUG nova.compute.manager [req-820b948c-7485-46a8-9a30-48c5cc87d4f1 req-fe2a049e-c36a-467c-8023-76244a1a5426 service nova] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Received event network-vif-deleted-ad48a330-41a2-437b-92eb-66a7086d8380 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1455.421251] env[62820]: DEBUG nova.virt.block_device [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updating existing volume attachment record: 7562bee3-94e0-442a-a001-0357d9a79e4b {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1455.441371] env[62820]: DEBUG nova.compute.manager [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1455.441371] env[62820]: DEBUG nova.compute.manager [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing instance network info cache due to event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1455.441371] env[62820]: DEBUG oslo_concurrency.lockutils [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.554541] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695487, 'name': PowerOffVM_Task, 'duration_secs': 0.301777} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.554541] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1455.554541] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1455.555311] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0666f0d-20e8-4c9a-8556-b34cb28e2472 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.564277] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1455.564960] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30e5bae1-8159-4fea-bf5e-50c4c3a091f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.602661] env[62820]: INFO nova.compute.manager [-] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Took 2.10 seconds to deallocate network for instance. 
[ 1455.652169] env[62820]: DEBUG nova.compute.utils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.654099] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86887f7a-ccbd-4748-9507-4cbb82491c44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.666251] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df115ce2-9cd9-4d86-a6e6-f757356cf515 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.676629] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1455.676739] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1455.676933] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleting the datastore file [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1455.677760] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3299dca-cb0b-44ae-a358-b4b0c2f93d85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.718552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144f3c5a-016a-4861-ad20-5d82a2e6a113 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.726343] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1455.726343] env[62820]: value = "task-1695490" [ 1455.726343] env[62820]: _type = "Task" [ 1455.726343] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.734397] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094a1cd0-e55a-4395-8ddc-9460bc4114e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.741734] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.752084] env[62820]: DEBUG nova.compute.provider_tree [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1455.754104] env[62820]: DEBUG nova.network.neutron [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Successfully created port: 52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1455.832178] env[62820]: DEBUG nova.network.neutron [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.837539] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695486, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.111328] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.158385] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1456.240383] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.457068} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.240383] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1456.240383] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1456.240383] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1456.260474] env[62820]: DEBUG nova.scheduler.client.report [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1456.334201] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695486, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.718497} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.335811] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 1926c780-faea-40d8-a00b-6ad576349a68/1926c780-faea-40d8-a00b-6ad576349a68.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1456.336281] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1456.336470] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddc759de-9b05-4ae9-8659-85ff81d6e5e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.338920] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.339160] env[62820]: DEBUG nova.compute.manager [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Inject network info {{(pid=62820) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7570}} [ 1456.339424] env[62820]: DEBUG nova.compute.manager [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] network_info to inject: |[{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7571}} [ 1456.344099] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Reconfiguring VM instance to set the machine id {{(pid=62820) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1456.344427] env[62820]: DEBUG oslo_concurrency.lockutils [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.344605] env[62820]: DEBUG nova.network.neutron [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1456.345804] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7fd814a-0693-4be3-aef0-3e21b4277945 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.364068] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1456.364068] env[62820]: value = "task-1695493" [ 1456.364068] env[62820]: _type = "Task" [ 1456.364068] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.373148] env[62820]: DEBUG oslo_vmware.api [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1456.373148] env[62820]: value = "task-1695494" [ 1456.373148] env[62820]: _type = "Task" [ 1456.373148] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.382051] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.391437] env[62820]: DEBUG oslo_vmware.api [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695494, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.768139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.772537] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.825s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.776025] env[62820]: INFO nova.compute.claims [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1456.807540] env[62820]: INFO nova.scheduler.client.report [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Deleted allocations for instance 2f917745-28ef-4dfe-8c09-45c15a80145d [ 1456.822108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "c06e3dcd-b997-497c-865d-5f277695cd7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.822108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.878402] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114066} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.879339] env[62820]: DEBUG nova.network.neutron [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updated VIF entry in instance network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1456.879806] env[62820]: DEBUG nova.network.neutron [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.884628] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1456.886486] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab4cd1f-d19c-45af-883c-aaf5ab587376 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.896632] env[62820]: DEBUG oslo_vmware.api [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695494, 'name': ReconfigVM_Task, 'duration_secs': 0.205105} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.905936] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b765fa-4b12-47fa-af88-c3c786f2b8d4 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Reconfigured VM instance to set the machine id {{(pid=62820) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1456.918627] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 1926c780-faea-40d8-a00b-6ad576349a68/1926c780-faea-40d8-a00b-6ad576349a68.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1456.919415] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f5d164f-39ff-492d-bb8f-8dfb602fab90 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.953347] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1456.953347] env[62820]: value = "task-1695495" [ 1456.953347] env[62820]: _type = "Task" [ 1456.953347] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.966694] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695495, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.176175] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1457.196500] env[62820]: DEBUG nova.compute.manager [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1457.197818] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808bd649-913b-48ba-b9b9-80ed6fd6a8f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:47:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='2096405995',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-92735740',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1457.217031] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1457.217460] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1457.217460] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1457.217460] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1457.217621] env[62820]: DEBUG nova.virt.hardware [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1457.218512] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8390b0-9f7b-45fb-a8f5-e20921cdb28c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.228534] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f237c5f-e6e9-4ec6-b2d0-7424df1bdb96 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.278684] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 
1457.278684] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1457.278684] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1457.279724] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1457.279724] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1457.279724] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1457.279724] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1457.279724] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1457.280298] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1457.280298] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1457.280298] env[62820]: DEBUG nova.virt.hardware [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1457.283849] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f991c0-2822-42e5-9c4d-b53467f8e968 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.293255] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4926885-7c7e-4a94-ab98-38fdf77b6d69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.308511] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:4b:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f1b810c-dc19-4971-a532-bdac241941cf', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1457.315960] env[62820]: DEBUG oslo.service.loopingcall [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.316698] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1457.316861] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a83d54c7-cdaa-463b-822f-59e563e14952 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.334246] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1457.340202] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20a580fc-619b-4999-ad6e-2458ec9b201e tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "2f917745-28ef-4dfe-8c09-45c15a80145d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.506s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.344588] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1457.344588] env[62820]: value = "task-1695496" [ 1457.344588] env[62820]: _type = "Task" [ 1457.344588] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.354446] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695496, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.386076] env[62820]: DEBUG oslo_concurrency.lockutils [req-69ffe9d4-eb50-4964-92bd-8bc89966e59d req-6e2d7f44-15a7-4d27-aa72-deb652fe1dba service nova] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.446983] env[62820]: DEBUG nova.network.neutron [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Successfully updated port: 52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1457.463726] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695495, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.619195] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.619481] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.619854] env[62820]: DEBUG nova.objects.instance [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'flavor' on Instance uuid bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1457.724219] env[62820]: INFO nova.compute.manager [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] instance snapshotting [ 1457.727075] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147d14e7-2903-4573-95af-2c8c941fb850 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.750230] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e60c7e-d677-4917-97eb-2f2768e6dd1f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.856226] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695496, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.858037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.951204] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.951204] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.951204] env[62820]: DEBUG nova.network.neutron [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1457.971648] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695495, 'name': ReconfigVM_Task, 'duration_secs': 0.558352} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.971648] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 1926c780-faea-40d8-a00b-6ad576349a68/1926c780-faea-40d8-a00b-6ad576349a68.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1457.971648] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dac386e0-8483-4fc6-a823-e49b3380fa55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.975247] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1457.975247] env[62820]: value = "task-1695497" [ 1457.975247] env[62820]: _type = "Task" [ 1457.975247] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.988888] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695497, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.218584] env[62820]: DEBUG nova.objects.instance [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'pci_requests' on Instance uuid bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.225409] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d120ba6-0be4-4be4-9a14-cbdafc9dd761 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.235144] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519982be-69ec-4ba5-836c-7da222f96ac4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.275407] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1458.276160] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-18e141b8-bdbc-484b-b572-a37ddbea4c89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.278791] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb40b3ba-6e83-4c62-ab53-9bdcc83556ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.293906] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66144f82-6365-4b26-b6df-71d3d633011a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.298271] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1458.298271] env[62820]: value = "task-1695499" [ 1458.298271] env[62820]: _type = "Task" [ 1458.298271] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.312338] env[62820]: DEBUG nova.compute.provider_tree [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.320644] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695499, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.358150] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695496, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.365681] env[62820]: DEBUG nova.compute.manager [req-defc5611-861d-4dd6-8415-195ade450c71 req-dac2fcc3-256e-499b-9c5d-cad6f32d48ee service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Received event network-vif-plugged-52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1458.366526] env[62820]: DEBUG oslo_concurrency.lockutils [req-defc5611-861d-4dd6-8415-195ade450c71 req-dac2fcc3-256e-499b-9c5d-cad6f32d48ee service nova] Acquiring lock "b6c58867-914e-4e6e-8092-fc8991dc87f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.366526] env[62820]: DEBUG oslo_concurrency.lockutils [req-defc5611-861d-4dd6-8415-195ade450c71 req-dac2fcc3-256e-499b-9c5d-cad6f32d48ee service nova] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.366526] env[62820]: DEBUG oslo_concurrency.lockutils [req-defc5611-861d-4dd6-8415-195ade450c71 req-dac2fcc3-256e-499b-9c5d-cad6f32d48ee service nova] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.366526] env[62820]: DEBUG nova.compute.manager [req-defc5611-861d-4dd6-8415-195ade450c71 req-dac2fcc3-256e-499b-9c5d-cad6f32d48ee service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] No waiting events found dispatching network-vif-plugged-52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1458.366746] env[62820]: WARNING nova.compute.manager [req-defc5611-861d-4dd6-8415-195ade450c71 req-dac2fcc3-256e-499b-9c5d-cad6f32d48ee service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Received unexpected event network-vif-plugged-52edbaed-89b5-4d7c-9398-b22a3e8b22be for instance with vm_state building and task_state spawning. 
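Annotation: the recurring "Waiting for the task: (returnval){ value = task-... }" and "_poll_task ... progress is N%" entries above and below are the driver submitting a vCenter task (CreateVM_Task, Rename_Task, CreateSnapshot_Task, PowerOnVM_Task, ...) and then polling it until it reports success. The snippet below is only a minimal, self-contained sketch of that polling pattern for readers of this log; TaskInfo, get_task_info and poll_interval are illustrative stand-ins, not the actual oslo.vmware API.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    # Stand-in for the vCenter TaskInfo object that _poll_task inspects.
    state: str               # 'queued' | 'running' | 'success' | 'error'
    progress: int            # 0-100, as in the "progress is N%" log lines
    error: str | None = None

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    # Poll until the task succeeds, mirroring the wait_for_task/_poll_task
    # behaviour visible in this log: report progress each cycle, return on
    # success, raise on error or timeout.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%")   # analogue of the DEBUG poll lines
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete before the deadline")

# Example with a fake task that completes after three polls:
states = iter([TaskInfo('running', 0), TaskInfo('running', 25), TaskInfo('success', 100)])
wait_for_task(lambda: next(states), poll_interval=0)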
[ 1458.403263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "ee188979-e740-4125-a17f-1c02ef9588f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.403448] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "ee188979-e740-4125-a17f-1c02ef9588f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.485610] env[62820]: DEBUG nova.network.neutron [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1458.492451] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695497, 'name': Rename_Task, 'duration_secs': 0.289216} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.492994] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1458.493123] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8103199c-8cf7-43cf-8f41-19917c4b21a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.501570] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1458.501570] env[62820]: value = "task-1695500" [ 1458.501570] env[62820]: _type = "Task" [ 1458.501570] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.513039] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695500, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.688425] env[62820]: DEBUG nova.network.neutron [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Updating instance_info_cache with network_info: [{"id": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "address": "fa:16:3e:be:fa:53", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52edbaed-89", "ovs_interfaceid": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.720865] env[62820]: DEBUG nova.objects.base [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1458.720865] env[62820]: DEBUG nova.network.neutron [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1458.780045] env[62820]: DEBUG nova.policy [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1458.809984] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695499, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.824192] env[62820]: DEBUG nova.scheduler.client.report [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1458.861057] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695496, 'name': CreateVM_Task, 'duration_secs': 1.142901} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.861251] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.861926] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.862101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.862689] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.862966] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f9c83b7-58da-4867-a203-8eac5f3b27f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.869224] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1458.869224] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fdbd87-8f3a-b0ee-fd95-6070f9fcca2c" [ 1458.869224] env[62820]: _type = "Task" [ 1458.869224] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.880888] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fdbd87-8f3a-b0ee-fd95-6070f9fcca2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.905791] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1459.015282] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695500, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.085432] env[62820]: DEBUG nova.objects.instance [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lazy-loading 'flavor' on Instance uuid 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.175126] env[62820]: DEBUG nova.network.neutron [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Successfully created port: 2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1459.191909] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Releasing lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.192291] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Instance network_info: |[{"id": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "address": "fa:16:3e:be:fa:53", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52edbaed-89", "ovs_interfaceid": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1459.193173] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:fa:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52edbaed-89b5-4d7c-9398-b22a3e8b22be', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1459.201654] env[62820]: DEBUG oslo.service.loopingcall [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.201654] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1459.201654] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f83aedca-a2fe-4e11-83cb-61b1f077cc97 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.225974] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1459.225974] env[62820]: value = "task-1695501" [ 1459.225974] env[62820]: _type = "Task" [ 1459.225974] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.239673] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695501, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.311219] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695499, 'name': CreateSnapshot_Task, 'duration_secs': 0.768661} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.311640] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1459.315021] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60d5ec4-25c1-4e1a-8c3f-8717e467e0f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.331906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.332485] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1459.335081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.989s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.335314] env[62820]: DEBUG nova.objects.instance [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lazy-loading 'resources' on Instance uuid 06fb6034-e010-49bd-9e5e-7699a43dd5a9 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.386262] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fdbd87-8f3a-b0ee-fd95-6070f9fcca2c, 'name': SearchDatastore_Task, 'duration_secs': 0.024347} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.386588] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.386834] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.387086] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.387268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.387463] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.387728] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a44caf06-71a1-40a8-812f-b1e1420e3056 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.399872] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.400099] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.400861] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7411795-abe5-4fb0-8f45-22640070c4f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.408717] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1459.408717] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525426b9-ea27-8442-8589-d3fef807addc" [ 1459.408717] env[62820]: _type = "Task" [ 1459.408717] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.421903] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525426b9-ea27-8442-8589-d3fef807addc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.432615] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.517390] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695500, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.590937] env[62820]: DEBUG oslo_concurrency.lockutils [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.591181] env[62820]: DEBUG oslo_concurrency.lockutils [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.738259] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695501, 'name': CreateVM_Task, 'duration_secs': 0.420768} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.738453] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1459.739140] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.739338] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.739659] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1459.739917] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dd26f32-c72e-43ac-9350-bd46cc20ccee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.746241] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1459.746241] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526e60db-dca1-1c47-7be8-c353902b3750" [ 1459.746241] env[62820]: _type = "Task" [ 1459.746241] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.755148] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526e60db-dca1-1c47-7be8-c353902b3750, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.834966] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1459.835489] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-212ae96d-9e60-4681-90e9-8293cade1678 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.841426] env[62820]: DEBUG nova.compute.utils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1459.848073] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1459.849228] env[62820]: DEBUG nova.network.neutron [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1459.850206] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1459.850206] env[62820]: value = "task-1695502" [ 1459.850206] env[62820]: _type = "Task" [ 1459.850206] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.862406] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695502, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.916817] env[62820]: DEBUG nova.policy [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d43a716b15a4fb5a628b33b5ca8afe2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bfd16891a3f453da8583d65051a2afb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1459.922760] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525426b9-ea27-8442-8589-d3fef807addc, 'name': SearchDatastore_Task, 'duration_secs': 0.017601} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.923577] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8c33a4-37ae-4e4e-9bef-940743de0e0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.935285] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1459.935285] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52558523-ffb3-dfb1-9155-d9b6592d2226" [ 1459.935285] env[62820]: _type = "Task" [ 1459.935285] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.945621] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52558523-ffb3-dfb1-9155-d9b6592d2226, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.018022] env[62820]: DEBUG oslo_vmware.api [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695500, 'name': PowerOnVM_Task, 'duration_secs': 1.463696} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.018022] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1460.018022] env[62820]: INFO nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Took 10.48 seconds to spawn the instance on the hypervisor. [ 1460.018519] env[62820]: DEBUG nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1460.019554] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6faa09-cb2e-47a9-b3c8-bf09b98163b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.025981] env[62820]: DEBUG nova.network.neutron [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1460.257743] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526e60db-dca1-1c47-7be8-c353902b3750, 'name': SearchDatastore_Task, 'duration_secs': 0.015436} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.261942] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.262231] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1460.262457] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.331767] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2da1fb-b502-49c6-b61c-bf1183152b6a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.342340] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e6c55a-1b55-4280-9325-bcce5306d5fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.346542] env[62820]: DEBUG nova.network.neutron [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Successfully created port: 243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1460.348882] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1460.383345] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b67055-5632-4be1-afd3-fafd1a44e581 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.390598] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695502, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.396775] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d4293c-40ec-43de-8e6e-a44efb665a92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.415135] env[62820]: DEBUG nova.compute.provider_tree [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.448783] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52558523-ffb3-dfb1-9155-d9b6592d2226, 'name': SearchDatastore_Task, 'duration_secs': 0.016789} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.449149] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.449504] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.449623] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.449797] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1460.450147] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bed3fa48-0855-4d63-bd56-5959d1b04036 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.452048] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3a51e74-be2b-435d-8408-a9ccfc000dd8 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.461533] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1460.461533] env[62820]: value = "task-1695503" [ 1460.461533] env[62820]: _type = "Task" [ 1460.461533] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.467685] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1460.468011] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1460.468624] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bdcd8fe-9b08-441b-9a0a-5da83088a273 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.473158] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52036bee-80d8-ad7e-c091-9846e26a6d6e/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1460.474273] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f68248-208b-4246-a5eb-68dec62c03e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.480461] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.482783] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1460.483028] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353516', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'name': 'volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a06d736c-a704-46e8-a6f7-85d8be40804f', 'attached_at': '', 'detached_at': '', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'serial': '02d0f274-09bd-42ba-8cf0-0c80226e94a5'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1460.483857] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c90faa-d104-4e1c-97d8-2ac72bf7d9e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.489121] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52036bee-80d8-ad7e-c091-9846e26a6d6e/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1460.489302] env[62820]: ERROR oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52036bee-80d8-ad7e-c091-9846e26a6d6e/disk-0.vmdk due to incomplete transfer. [ 1460.489636] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1460.489636] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a7a57b-da0d-7ba4-feab-895629ee131e" [ 1460.489636] env[62820]: _type = "Task" [ 1460.489636] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.490163] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99d7c7a4-3833-44a5-89bc-f5ac2b8ed1be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.507473] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18979318-c899-41d2-ac2a-02fc542c18a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.513702] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a7a57b-da0d-7ba4-feab-895629ee131e, 'name': SearchDatastore_Task, 'duration_secs': 0.012094} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.515767] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52036bee-80d8-ad7e-c091-9846e26a6d6e/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1460.515970] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Uploaded image 28870360-f129-4d06-bd67-a3f35c895554 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1460.518318] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1460.531584] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ff33ddd-425f-4494-9d7e-3673b9fea137 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.534124] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e0f7ad5b-da17-4177-9304-a55736335ddb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.544500] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5/volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1460.549049] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5c2b5d3-1093-447b-9377-eea51313d5af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.564869] env[62820]: INFO nova.compute.manager [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Took 42.85 seconds to build instance. [ 1460.568665] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1460.568665] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525dc112-b8d8-620c-f376-7bc77f2b1935" [ 1460.568665] env[62820]: _type = "Task" [ 1460.568665] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.575398] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1460.575398] env[62820]: value = "task-1695505" [ 1460.575398] env[62820]: _type = "Task" [ 1460.575398] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.576103] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1460.576103] env[62820]: value = "task-1695504" [ 1460.576103] env[62820]: _type = "Task" [ 1460.576103] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.589405] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525dc112-b8d8-620c-f376-7bc77f2b1935, 'name': SearchDatastore_Task} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.590446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.590722] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7/b6c58867-914e-4e6e-8092-fc8991dc87f7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.592121] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c011307c-15b9-440f-8062-20b950828cda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.600215] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695505, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.600489] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695504, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.608496] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1460.608496] env[62820]: value = "task-1695506" [ 1460.608496] env[62820]: _type = "Task" [ 1460.608496] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.619376] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.712535] env[62820]: DEBUG nova.compute.manager [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1460.712816] env[62820]: DEBUG nova.compute.manager [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing instance network info cache due to event network-changed-99ce12db-7b90-44f1-8086-9f95246773fe. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1460.713109] env[62820]: DEBUG oslo_concurrency.lockutils [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] Acquiring lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.846897] env[62820]: DEBUG nova.network.neutron [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.870506] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695502, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.919152] env[62820]: DEBUG nova.scheduler.client.report [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1460.974440] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695503, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.986781] env[62820]: DEBUG nova.compute.manager [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Received event network-changed-52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1460.987230] env[62820]: DEBUG nova.compute.manager [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Refreshing instance network info cache due to event network-changed-52edbaed-89b5-4d7c-9398-b22a3e8b22be. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1460.987545] env[62820]: DEBUG oslo_concurrency.lockutils [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] Acquiring lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.987777] env[62820]: DEBUG oslo_concurrency.lockutils [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] Acquired lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.988029] env[62820]: DEBUG nova.network.neutron [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Refreshing network info cache for port 52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.037635] env[62820]: DEBUG nova.network.neutron [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Successfully updated port: 2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1461.068183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6982dce8-c16e-4f1a-b836-990a9032e6d6 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "1926c780-faea-40d8-a00b-6ad576349a68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.366s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.093743] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695504, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.097517] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695505, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.122523] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695506, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.351054] env[62820]: DEBUG oslo_concurrency.lockutils [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.351054] env[62820]: DEBUG nova.compute.manager [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Inject network info {{(pid=62820) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7570}} [ 1461.351632] env[62820]: DEBUG nova.compute.manager [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] network_info to inject: |[{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7571}} [ 1461.359498] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Reconfiguring VM instance to set the machine id {{(pid=62820) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1461.360092] env[62820]: DEBUG oslo_concurrency.lockutils [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] Acquired lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.360442] env[62820]: DEBUG nova.network.neutron [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Refreshing network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.362183] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce0338fc-a278-4782-860d-84e65b6c3a78 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.387947] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1461.407764] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695502, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.410393] env[62820]: DEBUG oslo_vmware.api [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1461.410393] env[62820]: value = "task-1695507" [ 1461.410393] env[62820]: _type = "Task" [ 1461.410393] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.420317] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1461.420591] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1461.420773] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1461.420977] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Flavor pref 0:0:0 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1461.421176] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1461.421361] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1461.421643] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1461.421845] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1461.422052] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1461.422257] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1461.422470] env[62820]: DEBUG nova.virt.hardware [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1461.423394] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50190a20-7065-4260-bda0-e1bc65d8cb62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.432114] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.434193] env[62820]: DEBUG oslo_vmware.api [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': 
task-1695507, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.435420] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.443843] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dfbd6e-f78e-4eac-a990-97a1bcedca7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.463590] env[62820]: INFO nova.scheduler.client.report [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Deleted allocations for instance 06fb6034-e010-49bd-9e5e-7699a43dd5a9 [ 1461.479220] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525195} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.479220] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.479220] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.479220] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3363ceec-4a66-40a2-9d70-6cfc8965ff93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.485441] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1461.485441] env[62820]: value = "task-1695508" [ 1461.485441] env[62820]: _type = "Task" [ 1461.485441] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.499260] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695508, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.543030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.543434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.543628] env[62820]: DEBUG nova.network.neutron [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.596828] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695505, 'name': ReconfigVM_Task, 'duration_secs': 0.939488} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.600867] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Reconfigured VM instance instance-00000012 to attach disk [datastore1] volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5/volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1461.606038] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695504, 'name': Destroy_Task, 'duration_secs': 1.022259} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.606297] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15b66882-5bb2-4b84-b12c-bfe48f88e924 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.616196] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Destroyed the VM [ 1461.616662] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1461.619253] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-13b5a0f2-00d6-42d6-bdca-5ec13021b8bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.631048] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695506, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.824752} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.633299] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7/b6c58867-914e-4e6e-8092-fc8991dc87f7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.633551] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.633869] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1461.633869] env[62820]: value = "task-1695509" [ 1461.633869] env[62820]: _type = "Task" [ 1461.633869] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.634151] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1461.634151] env[62820]: value = "task-1695510" [ 1461.634151] env[62820]: _type = "Task" [ 1461.634151] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.634924] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb2e036d-95c9-4d7a-b801-5736f7d9297e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.652178] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695509, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.660211] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1461.660211] env[62820]: value = "task-1695511" [ 1461.660211] env[62820]: _type = "Task" [ 1461.660211] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.660569] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695510, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.670998] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695511, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.746981] env[62820]: DEBUG nova.network.neutron [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Updated VIF entry in instance network info cache for port 52edbaed-89b5-4d7c-9398-b22a3e8b22be. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.747529] env[62820]: DEBUG nova.network.neutron [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Updating instance_info_cache with network_info: [{"id": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "address": "fa:16:3e:be:fa:53", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52edbaed-89", "ovs_interfaceid": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.749493] env[62820]: DEBUG nova.network.neutron [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updated VIF entry in instance network info cache for port 99ce12db-7b90-44f1-8086-9f95246773fe. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.751414] env[62820]: DEBUG nova.network.neutron [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [{"id": "99ce12db-7b90-44f1-8086-9f95246773fe", "address": "fa:16:3e:dc:64:29", "network": {"id": "e56051ae-ca8d-4123-b3ce-41298e7a8017", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1339785792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7f22f2543c747b29127852290bd498c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99ce12db-7b", "ovs_interfaceid": "99ce12db-7b90-44f1-8086-9f95246773fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.872573] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695502, 'name': CloneVM_Task, 'duration_secs': 1.833394} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.872865] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Created linked-clone VM from snapshot [ 1461.873715] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cbec29-3a24-4703-84fb-f9f5ab72cffd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.882818] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Uploading image 1dfad5ef-e807-46a9-bde4-7b18b1a33ac2 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1461.896727] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1461.897037] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cd4ea3c6-c19d-4411-9149-460ce90254c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.905569] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1461.905569] env[62820]: value = "task-1695512" [ 1461.905569] env[62820]: _type = "Task" [ 1461.905569] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.916936] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695512, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.925369] env[62820]: DEBUG oslo_vmware.api [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695507, 'name': ReconfigVM_Task, 'duration_secs': 0.196202} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.925633] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-22890550-a25b-4b70-a645-ca302c89d3b6 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Reconfigured VM instance to set the machine id {{(pid=62820) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1461.941672] env[62820]: INFO nova.compute.claims [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1461.973818] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f136a32a-bbdb-487e-8790-47eb058ba046 tempest-AttachInterfacesV270Test-667496806 tempest-AttachInterfacesV270Test-667496806-project-member] Lock "06fb6034-e010-49bd-9e5e-7699a43dd5a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.789s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.996455] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07405} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.997211] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1461.997577] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc426d3d-1aa2-4dbe-8d96-a00f1b5a600e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.020442] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1462.021084] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59b88295-9d0b-4a01-8188-cd271a9d8426 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.048091] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1462.048091] env[62820]: value = "task-1695513" [ 1462.048091] env[62820]: _type = "Task" [ 1462.048091] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.056988] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695513, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.091915] env[62820]: WARNING nova.network.neutron [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] 26851e2e-dece-4dce-bec8-e64227003b80 already exists in list: networks containing: ['26851e2e-dece-4dce-bec8-e64227003b80']. ignoring it [ 1462.150655] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695510, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.153823] env[62820]: DEBUG oslo_vmware.api [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695509, 'name': RemoveSnapshot_Task, 'duration_secs': 0.518881} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.154201] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1462.154450] env[62820]: INFO nova.compute.manager [None req-c07ba531-4352-46e6-aa14-6a32743f20e9 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Took 16.15 seconds to snapshot the instance on the hypervisor. [ 1462.172296] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074635} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.172571] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1462.173517] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62ee8e1-ff56-4a94-9497-6d0706ad2b1a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.199258] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7/b6c58867-914e-4e6e-8092-fc8991dc87f7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1462.200311] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8b8aaf0-f396-4459-8a5f-f8c28874df2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.221370] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1462.221370] env[62820]: value = "task-1695514" [ 1462.221370] env[62820]: _type = "Task" [ 1462.221370] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.230555] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695514, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.251858] env[62820]: DEBUG oslo_concurrency.lockutils [req-6c8f6104-399d-486c-a450-435420d4b1e9 req-2ea07e59-96d3-4497-b129-cfc06fd75688 service nova] Releasing lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.253611] env[62820]: DEBUG oslo_concurrency.lockutils [req-c28224ed-b8d4-431e-85a0-ec9f143d6fea req-b49dbabd-d069-4cf3-a5d0-d7ef8ac9f002 service nova] Releasing lock "refresh_cache-7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.416498] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695512, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.448442] env[62820]: INFO nova.compute.resource_tracker [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating resource usage from migration 69cbcaf9-61c5-420e-a1c4-1817dff9efce [ 1462.539810] env[62820]: DEBUG nova.network.neutron [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Successfully updated port: 243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1462.561215] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695513, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.650494] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695510, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.732827] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695514, 'name': ReconfigVM_Task, 'duration_secs': 0.292845} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.735422] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Reconfigured VM instance instance-0000002a to attach disk [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7/b6c58867-914e-4e6e-8092-fc8991dc87f7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1462.735742] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62820) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1462.736763] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-24277ca3-4a1f-4d7b-8947-19f1322b90b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.742294] env[62820]: DEBUG nova.network.neutron [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2e2c8225-56f6-4223-a6ea-721acb71e411", "address": "fa:16:3e:ea:37:a2", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2c8225-56", "ovs_interfaceid": "2e2c8225-56f6-4223-a6ea-721acb71e411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.745242] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1462.745242] env[62820]: value = "task-1695515" [ 1462.745242] env[62820]: _type = "Task" [ 1462.745242] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.758758] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695515, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.859992] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a86ce69-060c-4656-adf0-f737771298a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.868090] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2740862c-d252-411f-9992-3d043c780390 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.900239] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec591ccd-e501-4f64-846d-0fe400e8102a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.913032] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4475849d-a2b1-4109-83a6-0aa6870a51fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.931082] env[62820]: DEBUG nova.compute.provider_tree [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.932808] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695512, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.045951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.046199] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.046375] env[62820]: DEBUG nova.network.neutron [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1463.060485] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695513, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.149485] env[62820]: DEBUG oslo_vmware.api [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695510, 'name': ReconfigVM_Task, 'duration_secs': 1.191919} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.149815] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353516', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'name': 'volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a06d736c-a704-46e8-a6f7-85d8be40804f', 'attached_at': '', 'detached_at': '', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'serial': '02d0f274-09bd-42ba-8cf0-0c80226e94a5'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1463.212957] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.213259] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.213473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.213657] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.213829] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.216105] env[62820]: INFO nova.compute.manager [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] 
[instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Terminating instance [ 1463.244929] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.245604] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.245764] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.246646] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458cd6a8-33cc-4716-bbca-4044415519b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.258708] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695515, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.06198} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.270460] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62820) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1463.271461] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1463.271685] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1463.271845] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1463.272043] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1463.272195] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1463.272347] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1463.272553] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1463.272708] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1463.272871] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1463.273047] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1463.273225] env[62820]: DEBUG nova.virt.hardware [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1463.279463] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Reconfiguring VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1463.280275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f45e03-f80d-42da-ae65-cf1a0ba0bf49 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.284210] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26872dc8-db6c-46f0-b3d4-52c38cc7bb47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.320409] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7/ephemeral_0.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1463.322545] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a290392-43e7-48a4-b629-f10ba0eac57e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.335921] env[62820]: DEBUG oslo_vmware.api [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1463.335921] env[62820]: value = "task-1695516" [ 1463.335921] env[62820]: _type = "Task" [ 1463.335921] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.345173] env[62820]: DEBUG oslo_vmware.api [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695516, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.346785] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1463.346785] env[62820]: value = "task-1695517" [ 1463.346785] env[62820]: _type = "Task" [ 1463.346785] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.355275] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.418839] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695512, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.434142] env[62820]: DEBUG nova.scheduler.client.report [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1463.561939] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695513, 'name': ReconfigVM_Task, 'duration_secs': 1.340433} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.562314] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3/069f58d6-f6bc-4ded-8274-6fed7c2f45b3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1463.562994] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9456f0cf-c210-4475-a5bb-458efbb260de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.571224] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1463.571224] env[62820]: value = "task-1695518" [ 1463.571224] env[62820]: _type = "Task" [ 1463.571224] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.584879] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695518, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.598439] env[62820]: DEBUG nova.network.neutron [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1463.722703] env[62820]: DEBUG nova.compute.manager [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1463.722703] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1463.722703] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e9e011-0d8b-4c3e-b402-2c97fbc9972e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.735292] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.735292] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23898808-acd0-4e17-8380-6b0b2f3c6eb9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.746186] env[62820]: DEBUG oslo_vmware.api [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1463.746186] env[62820]: value = "task-1695519" [ 1463.746186] env[62820]: _type = "Task" [ 1463.746186] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.757570] env[62820]: DEBUG oslo_vmware.api [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695519, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.823294] env[62820]: DEBUG nova.network.neutron [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [{"id": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "address": "fa:16:3e:ca:13:89", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap243136d0-94", "ovs_interfaceid": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.849184] env[62820]: DEBUG oslo_vmware.api [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.863033] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695517, 'name': ReconfigVM_Task, 'duration_secs': 0.347384} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.863033] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Reconfigured VM instance instance-0000002a to attach disk [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7/ephemeral_0.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1463.863628] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e8101a9-eb65-4ba1-977d-b707cdd5506c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.872862] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1463.872862] env[62820]: value = "task-1695520" [ 1463.872862] env[62820]: _type = "Task" [ 1463.872862] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.883571] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695520, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.923577] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695512, 'name': Destroy_Task, 'duration_secs': 1.551737} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.923577] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Destroyed the VM [ 1463.923577] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1463.923882] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-18a8b1be-771a-481b-9b83-5dd530e876f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.932139] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1463.932139] env[62820]: value = "task-1695521" [ 1463.932139] env[62820]: _type = "Task" [ 1463.932139] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.939607] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.505s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.939880] env[62820]: INFO nova.compute.manager [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Migrating [ 1463.940182] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.940385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.945425] env[62820]: DEBUG oslo_concurrency.lockutils [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.377s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.948727] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695521, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.081959] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695518, 'name': Rename_Task, 'duration_secs': 0.190969} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.084878] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1464.085683] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8bd192c-8581-4d14-978e-f2cc1c397e10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.095502] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1464.095502] env[62820]: value = "task-1695522" [ 1464.095502] env[62820]: _type = "Task" [ 1464.095502] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.108963] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.179037] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-vif-plugged-2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1464.179037] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.181533] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.181533] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.181533] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] No waiting events found dispatching network-vif-plugged-2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1464.181533] env[62820]: WARNING nova.compute.manager 
[req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received unexpected event network-vif-plugged-2e2c8225-56f6-4223-a6ea-721acb71e411 for instance with vm_state active and task_state None. [ 1464.181533] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-changed-2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1464.181533] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Refreshing instance network info cache due to event network-changed-2e2c8225-56f6-4223-a6ea-721acb71e411. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1464.181533] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Acquiring lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.181533] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Acquired lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.181533] env[62820]: DEBUG nova.network.neutron [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Refreshing network info cache for port 2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1464.201319] env[62820]: DEBUG nova.objects.instance [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lazy-loading 'flavor' on Instance uuid a06d736c-a704-46e8-a6f7-85d8be40804f {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1464.261339] env[62820]: DEBUG oslo_vmware.api [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695519, 'name': PowerOffVM_Task, 'duration_secs': 0.198183} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.261826] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.261955] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1464.262387] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bea1b45a-ac36-4716-ad97-b2d1bece927b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.325684] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.326050] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance network_info: |[{"id": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "address": "fa:16:3e:ca:13:89", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap243136d0-94", "ovs_interfaceid": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1464.326613] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:13:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '243136d0-94ab-4229-ba69-f9a74d65fcc0', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1464.334176] env[62820]: DEBUG oslo.service.loopingcall [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.337126] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1464.337630] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0796ec4-bec4-44df-b20c-6785e714f661 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.362648] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1464.362877] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1464.363065] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Deleting the datastore file [datastore1] 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1464.363635] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69868fae-83f0-4573-9fe0-3ec6910f87ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.370682] env[62820]: DEBUG oslo_vmware.api [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695516, 'name': ReconfigVM_Task, 'duration_secs': 0.870008} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.372911] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.373193] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Reconfigured VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1464.376029] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1464.376029] env[62820]: value = "task-1695524" [ 1464.376029] env[62820]: _type = "Task" [ 1464.376029] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.376029] env[62820]: DEBUG oslo_vmware.api [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for the task: (returnval){ [ 1464.376029] env[62820]: value = "task-1695525" [ 1464.376029] env[62820]: _type = "Task" [ 1464.376029] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.401907] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695520, 'name': Rename_Task, 'duration_secs': 0.18989} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.408298] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1464.408939] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695524, 'name': CreateVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.409217] env[62820]: DEBUG oslo_vmware.api [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695525, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.412071] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ba2e92f-6045-4634-bea4-598618536596 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.425249] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1464.425249] env[62820]: value = "task-1695526" [ 1464.425249] env[62820]: _type = "Task" [ 1464.425249] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.439512] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695526, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.441669] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287cd3d2-f66a-4ab2-8eca-b333344657b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.449061] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695521, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.450066] env[62820]: INFO nova.compute.rpcapi [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1464.450632] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.466751] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17984d8-3123-4420-9218-0a96b92bb411 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.509957] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2224b27a-e874-4984-ad1d-049efd172dfe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.519895] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6581e4-af59-48a9-a839-45e58cb8d7d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.537651] env[62820]: DEBUG nova.compute.provider_tree [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.608200] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695522, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.648510] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "f186854d-3f0a-4512-83b9-2c946247ccbe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.648951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.649284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "f186854d-3f0a-4512-83b9-2c946247ccbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.649616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.650019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.652634] env[62820]: INFO nova.compute.manager [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Terminating instance [ 1464.707496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-453939f2-6034-4bab-87d9-f5167c63661b tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.364s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.879135] env[62820]: DEBUG oslo_concurrency.lockutils [None req-79bfc327-5e1a-4bdb-8bd2-aa465c2f90ee tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.259s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.894018] env[62820]: DEBUG 
oslo_vmware.api [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Task: {'id': task-1695525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206295} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.901339] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1464.901339] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1464.901339] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1464.901492] env[62820]: INFO nova.compute.manager [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1464.901627] env[62820]: DEBUG oslo.service.loopingcall [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.901807] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695524, 'name': CreateVM_Task, 'duration_secs': 0.453596} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.902321] env[62820]: DEBUG nova.compute.manager [-] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1464.902421] env[62820]: DEBUG nova.network.neutron [-] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1464.903811] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1464.904788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.904955] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.905515] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1464.906272] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1959cff-ab4c-4163-a782-bb8768ddaa79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.912529] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1464.912529] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52745944-2f54-5a0d-2bde-f7ae46560359" [ 1464.912529] env[62820]: _type = "Task" [ 1464.912529] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.921965] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52745944-2f54-5a0d-2bde-f7ae46560359, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.924518] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "1926c780-faea-40d8-a00b-6ad576349a68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.924744] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "1926c780-faea-40d8-a00b-6ad576349a68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.924943] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "1926c780-faea-40d8-a00b-6ad576349a68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.925546] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "1926c780-faea-40d8-a00b-6ad576349a68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.925546] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "1926c780-faea-40d8-a00b-6ad576349a68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.927829] env[62820]: INFO nova.compute.manager [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Terminating instance [ 1464.952106] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695526, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.957786] env[62820]: DEBUG oslo_vmware.api [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695521, 'name': RemoveSnapshot_Task, 'duration_secs': 0.789078} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.958044] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1464.978357] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.978357] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.978527] env[62820]: DEBUG nova.network.neutron [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.040851] env[62820]: DEBUG nova.scheduler.client.report [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1465.096058] env[62820]: DEBUG nova.network.neutron [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updated VIF entry in instance network info cache for port 2e2c8225-56f6-4223-a6ea-721acb71e411. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1465.096581] env[62820]: DEBUG nova.network.neutron [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2e2c8225-56f6-4223-a6ea-721acb71e411", "address": "fa:16:3e:ea:37:a2", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2c8225-56", "ovs_interfaceid": "2e2c8225-56f6-4223-a6ea-721acb71e411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.110093] env[62820]: DEBUG oslo_vmware.api [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695522, 'name': PowerOnVM_Task, 'duration_secs': 0.687834} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.110332] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1465.110555] env[62820]: DEBUG nova.compute.manager [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1465.111530] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76bcd67-38f6-4506-a33c-00a6edf09fba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.157579] env[62820]: DEBUG nova.compute.manager [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1465.157839] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1465.158968] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547c07ff-f7ec-41fc-ab1e-f4a00c3e686b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.174820] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1465.175156] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bec4854b-1e97-4590-bd9d-c6836e9b592e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.284985] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1465.285240] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1465.285423] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleting 
the datastore file [datastore1] f186854d-3f0a-4512-83b9-2c946247ccbe {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1465.285684] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07c8d753-a75d-4a45-b19e-c5bccc112fac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.293978] env[62820]: DEBUG oslo_vmware.api [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1465.293978] env[62820]: value = "task-1695528" [ 1465.293978] env[62820]: _type = "Task" [ 1465.293978] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.304123] env[62820]: DEBUG oslo_vmware.api [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695528, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.422943] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52745944-2f54-5a0d-2bde-f7ae46560359, 'name': SearchDatastore_Task, 'duration_secs': 0.021612} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.423300] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.423547] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1465.423788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.423952] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.424163] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1465.424437] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-320ce8eb-9356-4033-bfc0-f8e01f05d235 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.436853] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1465.437218] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1465.438650] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331d1e53-1898-4ae4-bd65-b2f38420d810 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.444475] env[62820]: DEBUG nova.compute.manager [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1465.444862] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1465.444975] env[62820]: DEBUG oslo_vmware.api [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695526, 'name': PowerOnVM_Task, 'duration_secs': 0.787215} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.446057] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab58519-dcfb-4d41-a0af-43195b411baa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.448934] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1465.449181] env[62820]: INFO nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1465.449433] env[62820]: DEBUG nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1465.451302] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82af6534-bc24-4095-b597-d64bcb18f26d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.453820] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1465.453820] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ed5271-e3a6-7d90-ddb1-d097f936036d" [ 1465.453820] env[62820]: _type = "Task" [ 1465.453820] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.461327] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.462541] env[62820]: WARNING nova.compute.manager [None req-e4018522-085a-4f10-bc88-ea18b1b70bab tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Image not found during snapshot: nova.exception.ImageNotFound: Image 1dfad5ef-e807-46a9-bde4-7b18b1a33ac2 could not be found. 
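
The task-handling entries above ("Invoking ... with opID=oslo.vmware-...", "Waiting for the task: (returnval){ ... }", "progress is N%", "completed successfully") are all emitted by oslo.vmware's session layer, which the vmwareapi driver uses for every vSphere call. A minimal sketch of that pattern, with hypothetical connection values and a placeholder VM reference (nothing below is taken from this log except the method name):

    from oslo_vmware import api as vmware_api

    # Hypothetical vCenter host and credentials; the positional order follows
    # oslo.vmware's VMwareAPISession constructor.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = None  # placeholder: a ManagedObjectReference looked up elsewhere

    # invoke_api issues the SOAP request (logged as "Invoking X with opID=...");
    # wait_for_task then polls it, producing the "progress is N%" and
    # "completed successfully" lines seen throughout this log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
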
[ 1465.467586] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-974199ee-c6c0-4543-a758-c82bea436828 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.473606] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ed5271-e3a6-7d90-ddb1-d097f936036d, 'name': SearchDatastore_Task, 'duration_secs': 0.022054} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.474508] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-116b3ba0-0d16-4b10-87af-f4c3e1ca9c88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.485976] env[62820]: DEBUG oslo_vmware.api [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1465.485976] env[62820]: value = "task-1695529" [ 1465.485976] env[62820]: _type = "Task" [ 1465.485976] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.487383] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1465.487383] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5222f270-1777-1ffc-f2d3-0c1fc9323abb" [ 1465.487383] env[62820]: _type = "Task" [ 1465.487383] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.500820] env[62820]: DEBUG oslo_vmware.api [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695529, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.505535] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5222f270-1777-1ffc-f2d3-0c1fc9323abb, 'name': SearchDatastore_Task, 'duration_secs': 0.012143} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.505892] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.506242] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 498236b7-3688-4ab1-a604-a9737ba058e8/498236b7-3688-4ab1-a604-a9737ba058e8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1465.506525] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9b057fa-570c-43fb-a890-f6b6ada2d184 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.515396] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1465.515396] env[62820]: value = "task-1695530" [ 1465.515396] env[62820]: _type = "Task" [ 1465.515396] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.526868] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.551466] env[62820]: DEBUG oslo_concurrency.lockutils [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.551696] env[62820]: INFO nova.compute.manager [None req-366f2a46-3cba-459d-ad08-c145731e18e3 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Successfully reverted task state from rebuilding on failure for instance. 
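
The lock bookkeeping throughout this log ('Acquiring lock ... by ...', 'Lock ... acquired ... :: waited Ns', '"released" ... :: held Ns', and the Acquired/Releasing pairs around "refresh_cache-<uuid>") is produced by oslo.concurrency's lockutils; Nova's own synchronization helpers are thin wrappers over it. A minimal sketch of the two forms, reusing lock names from nearby entries purely as examples:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs "Acquiring lock ... by ...",
    # "acquired ... :: waited Ns" and '"released" ... :: held Ns',
    # matching the compute_resources entries above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass

    # Context-manager form: logs the Acquiring/Acquired/Releasing lines used
    # around the refresh_cache-<instance uuid> sections.
    with lockutils.lock('refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e'):
        pass
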
[ 1465.559365] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 31.944s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.603029] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Releasing lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.603501] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Received event network-vif-plugged-243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1465.603883] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Acquiring lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.604121] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.604384] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.604637] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] No waiting events found dispatching network-vif-plugged-243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1465.604989] env[62820]: WARNING nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Received unexpected event network-vif-plugged-243136d0-94ab-4229-ba69-f9a74d65fcc0 for instance with vm_state building and task_state spawning. 
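
The network-vif-plugged / network-changed entries just above are external instance events: Neutron posts them to Nova's os-server-external-events API, and the compute manager either hands them to a waiter or, as in the WARNING above, records that nothing was waiting for the event. The request body has roughly this shape (UUIDs copied from the log entries; endpoint and authentication omitted):

    # Approximate payload Neutron sends to POST /os-server-external-events
    external_events = {
        "events": [{
            "name": "network-vif-plugged",
            "server_uuid": "498236b7-3688-4ab1-a604-a9737ba058e8",
            "tag": "243136d0-94ab-4229-ba69-f9a74d65fcc0",
            "status": "completed",
        }]
    }
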
[ 1465.605154] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Received event network-changed-243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1465.605409] env[62820]: DEBUG nova.compute.manager [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Refreshing instance network info cache due to event network-changed-243136d0-94ab-4229-ba69-f9a74d65fcc0. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1465.605706] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Acquiring lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.605924] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Acquired lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.606184] env[62820]: DEBUG nova.network.neutron [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Refreshing network info cache for port 243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1465.631822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.807774] env[62820]: DEBUG oslo_vmware.api [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308474} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.810240] env[62820]: DEBUG nova.network.neutron [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.811631] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1465.812680] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1465.812680] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1465.812680] env[62820]: INFO nova.compute.manager [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1465.812680] env[62820]: DEBUG oslo.service.loopingcall [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1465.812916] env[62820]: DEBUG nova.compute.manager [-] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1465.813024] env[62820]: DEBUG nova.network.neutron [-] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1465.980095] env[62820]: INFO nova.compute.manager [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Took 42.06 seconds to build instance. [ 1466.002133] env[62820]: DEBUG oslo_vmware.api [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695529, 'name': PowerOffVM_Task, 'duration_secs': 0.351539} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.002483] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1466.002918] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1466.003057] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6e02611-f3b3-406a-a617-a52a1b65f0d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.029709] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695530, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.105060] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1466.105060] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1466.105361] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Deleting the datastore file [datastore1] 1926c780-faea-40d8-a00b-6ad576349a68 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1466.105671] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aec03013-7f75-400e-adca-a68f482e5d02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.116578] env[62820]: DEBUG oslo_vmware.api [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for the task: (returnval){ [ 1466.116578] env[62820]: value = "task-1695532" [ 1466.116578] env[62820]: _type = "Task" [ 1466.116578] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.129385] env[62820]: DEBUG oslo_vmware.api [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695532, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.174212] env[62820]: DEBUG nova.network.neutron [-] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.181575] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "9114a81d-86a9-493b-9c07-c4724a0588ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.181679] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.315716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.440900] env[62820]: DEBUG nova.network.neutron [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updated VIF entry in instance network info cache for port 243136d0-94ab-4229-ba69-f9a74d65fcc0. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1466.441334] env[62820]: DEBUG nova.network.neutron [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [{"id": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "address": "fa:16:3e:ca:13:89", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap243136d0-94", "ovs_interfaceid": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.483432] env[62820]: DEBUG oslo_concurrency.lockutils [None req-864b7faf-205c-4e81-ac3d-5602e135303a tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.575s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.533338] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566061} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.533338] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 498236b7-3688-4ab1-a604-a9737ba058e8/498236b7-3688-4ab1-a604-a9737ba058e8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1466.533338] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1466.533338] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-913ce187-9cc6-4cb2-bfaa-29c939151a4b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.539981] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1466.539981] env[62820]: value = "task-1695533" [ 1466.539981] env[62820]: _type = "Task" [ 1466.539981] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.554289] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695533, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.579147] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 as it has an incoming, in-progress migration 17065b4d-ea93-42e5-aca0-e553248f0e35. Migration status is error {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1466.579147] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance ab21fd61-3a44-42fa-92be-51214b0a9a1e as it has an incoming, in-progress migration 69cbcaf9-61c5-420e-a1c4-1817dff9efce. Migration status is pre-migrating {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1466.580924] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Skipping migration as instance is neither resizing nor live-migrating. 
{{(pid=62820) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1466.581185] env[62820]: INFO nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating resource usage from migration 69cbcaf9-61c5-420e-a1c4-1817dff9efce [ 1466.610923] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.610923] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.610923] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 90ea0c16-739a-4132-ac36-e154a846b9c2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1466.610923] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9910a0ea-5ce0-41e9-b449-da729a4c3223 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1466.610923] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.610923] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a06d736c-a704-46e8-a6f7-85d8be40804f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.611378] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 519c961c-557e-4796-88da-047c55d6be44 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1466.611547] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 58a26c98-cbf9-491f-8d2c-20281c3d7771 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.611683] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0eb62424-0ee6-4ff4-94c2-bb6a10861759 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1466.611804] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.612127] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance aa98dbb0-5ff7-4da5-a365-2b55a8bd2216 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.612387] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.612504] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 31639194-b0c4-4eb9-a6f4-e61b067c807f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1466.612571] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 262d0714-d7d7-443c-9927-ef03ba9f230e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1466.612645] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 706d42cd-53d9-4976-bc67-98816a40fff4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.612763] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance f186854d-3f0a-4512-83b9-2c946247ccbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.612871] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance e45cdcfb-f2ce-4798-8e97-1c3f95e61db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.613352] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 1926c780-faea-40d8-a00b-6ad576349a68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.613352] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b6c58867-914e-4e6e-8092-fc8991dc87f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.613544] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 498236b7-3688-4ab1-a604-a9737ba058e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.613544] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Migration 69cbcaf9-61c5-420e-a1c4-1817dff9efce is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1466.613662] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance ab21fd61-3a44-42fa-92be-51214b0a9a1e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.627889] env[62820]: DEBUG oslo_vmware.api [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Task: {'id': task-1695532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254219} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.628755] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1466.628948] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1466.629151] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1466.629332] env[62820]: INFO nova.compute.manager [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1466.629569] env[62820]: DEBUG oslo.service.loopingcall [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.629756] env[62820]: DEBUG nova.compute.manager [-] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1466.629852] env[62820]: DEBUG nova.network.neutron [-] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1466.678883] env[62820]: INFO nova.compute.manager [-] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Took 1.78 seconds to deallocate network for instance. [ 1466.679329] env[62820]: DEBUG nova.network.neutron [-] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.685185] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1466.917490] env[62820]: DEBUG nova.compute.manager [req-b9a0a187-8ff1-4186-b5fa-8d7be689c50c req-52f84dea-d0c1-41ef-bf75-3fb6f8d11025 service nova] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Received event network-vif-deleted-99ce12db-7b90-44f1-8086-9f95246773fe {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1466.917724] env[62820]: DEBUG nova.compute.manager [req-b9a0a187-8ff1-4186-b5fa-8d7be689c50c req-52f84dea-d0c1-41ef-bf75-3fb6f8d11025 service nova] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Received event network-vif-deleted-7aa70f31-5a35-418a-a31b-0258e18a6cf7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1466.943464] env[62820]: DEBUG oslo_concurrency.lockutils [req-4a034f26-1f48-40cc-b947-9c6b97aa016b req-af93eb22-6558-4dd9-9cca-0feeae58c6ba service nova] Releasing lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.054417] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695533, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074085} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.054417] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1467.054417] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f63cd96-24cd-4b59-b0c3-031a5b2e96e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.078699] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 498236b7-3688-4ab1-a604-a9737ba058e8/498236b7-3688-4ab1-a604-a9737ba058e8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1467.079061] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fbfc89f-b39e-4a5d-9263-ff8f46bb3116 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.100109] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1467.100109] env[62820]: value = "task-1695534" [ 1467.100109] env[62820]: _type = "Task" [ 1467.100109] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.109174] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695534, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.118143] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 09ab63ae-fd36-4915-8c59-9d9bc5833288 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1467.185790] env[62820]: INFO nova.compute.manager [-] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Took 1.37 seconds to deallocate network for instance. [ 1467.197381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.215787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.431731] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.432162] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.451511] env[62820]: DEBUG nova.network.neutron [-] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.486423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.486423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.486423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.486423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.486938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.490119] env[62820]: INFO nova.compute.manager [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Terminating instance [ 1467.612077] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695534, 'name': ReconfigVM_Task, 'duration_secs': 0.474389} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.612374] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 498236b7-3688-4ab1-a604-a9737ba058e8/498236b7-3688-4ab1-a604-a9737ba058e8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1467.613029] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba07d8d9-6d04-42e1-a03e-38803026de99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.620780] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1467.622815] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1467.622815] env[62820]: value = "task-1695535" [ 1467.622815] env[62820]: _type = "Task" [ 1467.622815] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.637272] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695535, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.692031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.841768] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4932a68-9d8e-4d07-adb8-cb62ace77328 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.870781] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1467.939013] env[62820]: INFO nova.compute.manager [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Detaching volume 02d0f274-09bd-42ba-8cf0-0c80226e94a5 [ 1467.958474] env[62820]: INFO nova.compute.manager [-] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Took 1.33 seconds to deallocate network for instance. [ 1467.994528] env[62820]: INFO nova.virt.block_device [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Attempting to driver detach volume 02d0f274-09bd-42ba-8cf0-0c80226e94a5 from mountpoint /dev/sdb [ 1467.995305] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1467.995305] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353516', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'name': 'volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a06d736c-a704-46e8-a6f7-85d8be40804f', 'attached_at': '', 'detached_at': '', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'serial': '02d0f274-09bd-42ba-8cf0-0c80226e94a5'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1467.996312] env[62820]: DEBUG nova.compute.manager [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1467.996312] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1467.996857] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee06b2e5-19f3-4c61-9a9c-042f602ca800 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.003247] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2964f9af-8cdf-4917-abf2-18fcea59c238 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.033699] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ba3959-fd81-4bc7-835c-4e9f9a9f3420 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.037326] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1468.037326] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7652b082-c0ca-4beb-b84f-0f4c69c7bb3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.045105] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a09786a-62f4-4737-aa53-1939550546d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.050030] env[62820]: DEBUG oslo_vmware.api [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1468.050030] env[62820]: value = "task-1695536" [ 1468.050030] env[62820]: _type = "Task" [ 1468.050030] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.077321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13b37db-13b2-4326-9c2c-36e398f4382f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.083429] env[62820]: DEBUG oslo_vmware.api [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695536, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.098885] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] The volume has not been displaced from its original location: [datastore1] volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5/volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1468.103768] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Reconfiguring VM instance instance-00000012 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1468.104143] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7aa67e66-adaf-40ea-bacf-1c6a2461e0e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.124245] env[62820]: DEBUG oslo_vmware.api [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1468.124245] env[62820]: value = "task-1695537" [ 1468.124245] env[62820]: _type = "Task" [ 1468.124245] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.127847] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 56c371a9-983f-4d5f-8abf-0183736c374c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1468.139880] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695535, 'name': Rename_Task, 'duration_secs': 0.151283} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.142040] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1468.142328] env[62820]: DEBUG oslo_vmware.api [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695537, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.143205] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a6ee168-4b86-420e-b43c-fa7250e2a9b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.152356] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1468.152356] env[62820]: value = "task-1695538" [ 1468.152356] env[62820]: _type = "Task" [ 1468.152356] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.161183] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.185795] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-2e2c8225-56f6-4223-a6ea-721acb71e411" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.186168] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-2e2c8225-56f6-4223-a6ea-721acb71e411" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.380800] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1468.381198] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5cafaf1-b017-4708-9807-f500b89fd745 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.392844] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1468.392844] env[62820]: value = "task-1695539" [ 1468.392844] env[62820]: _type = "Task" [ 1468.392844] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.404049] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695539, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.475347] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.560868] env[62820]: DEBUG oslo_vmware.api [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695536, 'name': PowerOffVM_Task, 'duration_secs': 0.364189} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.561168] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1468.561378] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1468.561676] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cba333f3-1698-4d7a-ac13-8ff0cf9f120f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.633405] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b7c9f518-c908-42cc-ba09-59b0f8431f68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1468.642934] env[62820]: DEBUG oslo_vmware.api [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695537, 'name': ReconfigVM_Task, 'duration_secs': 0.365629} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.643489] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Reconfigured VM instance instance-00000012 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1468.648904] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aaf63cde-5cc8-4409-a7a5-ac8d65d84398 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.677599] env[62820]: DEBUG oslo_vmware.api [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695538, 'name': PowerOnVM_Task, 'duration_secs': 0.516348} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.680972] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1468.681844] env[62820]: INFO nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Took 7.29 seconds to spawn the instance on the hypervisor. [ 1468.682307] env[62820]: DEBUG nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1468.682690] env[62820]: DEBUG oslo_vmware.api [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1468.682690] env[62820]: value = "task-1695541" [ 1468.682690] env[62820]: _type = "Task" [ 1468.682690] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.686145] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1468.686145] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1468.686145] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleting the datastore file [datastore1] e45cdcfb-f2ce-4798-8e97-1c3f95e61db3 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1468.686145] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17777e3-79d7-4ad7-8743-b7dce2660c6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.688970] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4161e2d2-489d-47ca-99d5-3ef237aa707f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.696263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.696263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.696779] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c0686c-36ff-4d40-a8bd-312d8527efee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.708656] env[62820]: DEBUG oslo_vmware.api [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1468.708656] env[62820]: value = "task-1695542" [ 1468.708656] env[62820]: _type = "Task" [ 1468.708656] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.713568] env[62820]: DEBUG oslo_vmware.api [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695541, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.730652] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9a8bee-b740-4992-9b12-984a5e37b4f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.736405] env[62820]: DEBUG oslo_vmware.api [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695542, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.755411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "706d42cd-53d9-4976-bc67-98816a40fff4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.755623] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "706d42cd-53d9-4976-bc67-98816a40fff4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.755826] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "706d42cd-53d9-4976-bc67-98816a40fff4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.756082] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "706d42cd-53d9-4976-bc67-98816a40fff4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.756229] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "706d42cd-53d9-4976-bc67-98816a40fff4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.764420] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0200a890-c26b-4cce-b573-0b626326859d 
tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Reconfiguring VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1468.767030] env[62820]: INFO nova.compute.manager [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Terminating instance [ 1468.767464] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db7a138c-59ce-4b48-843c-2fa5cec0b0d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.791134] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1468.791134] env[62820]: value = "task-1695543" [ 1468.791134] env[62820]: _type = "Task" [ 1468.791134] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.800264] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.908024] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695539, 'name': PowerOffVM_Task, 'duration_secs': 0.251116} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.908024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1468.908024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1469.021145] env[62820]: DEBUG nova.compute.manager [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Received event network-vif-deleted-c46ea4ef-6d34-4889-b119-49077f2482b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1469.021145] env[62820]: DEBUG nova.compute.manager [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Received event network-changed-52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1469.021145] env[62820]: DEBUG nova.compute.manager [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Refreshing instance network info cache due to event network-changed-52edbaed-89b5-4d7c-9398-b22a3e8b22be. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1469.021145] env[62820]: DEBUG oslo_concurrency.lockutils [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] Acquiring lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.021920] env[62820]: DEBUG oslo_concurrency.lockutils [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] Acquired lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.022194] env[62820]: DEBUG nova.network.neutron [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Refreshing network info cache for port 52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.136931] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance c06e3dcd-b997-497c-865d-5f277695cd7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.200698] env[62820]: DEBUG oslo_vmware.api [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695541, 'name': ReconfigVM_Task, 'duration_secs': 0.175137} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.201223] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353516', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'name': 'volume-02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a06d736c-a704-46e8-a6f7-85d8be40804f', 'attached_at': '', 'detached_at': '', 'volume_id': '02d0f274-09bd-42ba-8cf0-0c80226e94a5', 'serial': '02d0f274-09bd-42ba-8cf0-0c80226e94a5'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1469.224064] env[62820]: INFO nova.compute.manager [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Took 43.30 seconds to build instance. [ 1469.232690] env[62820]: DEBUG oslo_vmware.api [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.508145} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.232690] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1469.232690] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1469.232951] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1469.233301] env[62820]: INFO nova.compute.manager [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Took 1.24 seconds to destroy the instance on the hypervisor. 
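The `_remove_deleted_instances_allocations` entries throughout this period show the resource tracker sorting placement allocations against this compute node into three outcomes: instances it actively manages (allocation kept), instances whose allocations reference this host but which it does not manage (heal skipped with a warning, "because we do not know what to do"), and instances that have been scheduled here but have not started yet (heal skipped quietly). Below is a minimal, standalone sketch of that triage, written only to make the log lines easier to follow; `InstanceRecord`, `triage_allocation`, and the `managed_here`/`has_started` flags are hypothetical names for this illustration, not Nova's actual types or the real decision logic in resource_tracker.py.

```python
# Illustrative sketch only: a simplified, self-contained approximation of the
# allocation triage described by the resource_tracker log entries above.
# The record type and field names are hypothetical, not Nova's real objects.
from dataclasses import dataclass, field


@dataclass
class InstanceRecord:
    uuid: str
    managed_here: bool   # instance is actively managed on this compute host
    has_started: bool    # scheduler made an allocation, but the VM may not exist yet
    resources: dict = field(
        default_factory=lambda: {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}
    )


def triage_allocation(inst: InstanceRecord) -> str:
    """Decide what to do with this instance's placement allocation."""
    if inst.managed_here:
        # "actively managed on this compute host and has allocations in placement"
        return "keep allocation"
    if not inst.has_started:
        # "has been scheduled to this compute host ... has yet to start.
        #  Skipping heal of allocation"
        return "skip heal (scheduled, not started)"
    # "is not being actively managed by this compute host but has allocations
    #  referencing this compute host ... Skipping heal of allocation because
    #  we do not know what to do."
    return "skip heal, warn (orphaned allocation)"


if __name__ == "__main__":
    # Abbreviated UUIDs taken from the log entries above, one per outcome.
    sample = [
        InstanceRecord("069f58d6", managed_here=True, has_started=True),
        InstanceRecord("90ea0c16", managed_here=False, has_started=True),
        InstanceRecord("09ab63ae", managed_here=False, has_started=False),
    ]
    for inst in sample:
        print(f"{inst.uuid}: {triage_allocation(inst)}")
```

The conservative behaviour visible in the warnings is deliberate: placement allocations are shared state between the scheduler and every compute node, so when the tracker cannot account for an allocation it only logs and moves on rather than deleting it.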
[ 1469.233420] env[62820]: DEBUG oslo.service.loopingcall [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.233580] env[62820]: DEBUG nova.compute.manager [-] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1469.233880] env[62820]: DEBUG nova.network.neutron [-] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1469.284353] env[62820]: DEBUG nova.compute.manager [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1469.284603] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1469.285996] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2528d7ce-1af8-472d-8ad6-2cf7e30248ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.303653] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.304927] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1469.305334] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d76cfb74-db95-4eee-8a69-68e628cbb727 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.313841] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1469.313841] env[62820]: value = "task-1695544" [ 1469.313841] env[62820]: _type = "Task" [ 1469.313841] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.322781] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.415142] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1469.415142] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1469.415142] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1469.415142] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1469.415142] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1469.415142] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1469.415709] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1469.416052] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1469.416714] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1469.417052] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1469.417365] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1469.423316] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-525b32fb-3cc7-46b6-8c0e-2557473df926 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.442031] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1469.442031] env[62820]: value = "task-1695545" [ 1469.442031] env[62820]: _type = "Task" [ 1469.442031] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.451415] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695545, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.639815] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance ee188979-e740-4125-a17f-1c02ef9588f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.728031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd044877-8b82-4830-8f5e-cf57d824df2f tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.807s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.766562] env[62820]: DEBUG nova.objects.instance [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lazy-loading 'flavor' on Instance uuid a06d736c-a704-46e8-a6f7-85d8be40804f {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1469.810225] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.830927] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695544, 'name': PowerOffVM_Task, 'duration_secs': 0.257578} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.833431] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1469.833611] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1469.833868] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d237f658-badb-4823-874c-5558fba9f278 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.924590] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.927024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.927024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1469.927024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1469.927024] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleting the datastore file [datastore1] 706d42cd-53d9-4976-bc67-98816a40fff4 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1469.927985] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77bd8822-4f14-4fb3-adcf-44f0489596f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.938044] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1469.938044] env[62820]: value = "task-1695547" [ 1469.938044] env[62820]: _type = "Task" [ 1469.938044] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.952142] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.959970] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695545, 'name': ReconfigVM_Task, 'duration_secs': 0.161655} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.960874] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1469.987056] env[62820]: DEBUG nova.network.neutron [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Updated VIF entry in instance network info cache for port 52edbaed-89b5-4d7c-9398-b22a3e8b22be. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1469.987267] env[62820]: DEBUG nova.network.neutron [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Updating instance_info_cache with network_info: [{"id": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "address": "fa:16:3e:be:fa:53", "network": {"id": "71e54fc0-96e2-4d3e-819d-c545cdd4f052", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1297533262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f59ab047666940c6bcb633a221194395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52edbaed-89", "ovs_interfaceid": "52edbaed-89b5-4d7c-9398-b22a3e8b22be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.147906] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9114a81d-86a9-493b-9c07-c4724a0588ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.148230] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1470.148420] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1470.153131] env[62820]: DEBUG nova.compute.manager [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1470.153331] env[62820]: DEBUG nova.compute.manager [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing instance network info cache due to event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1470.154532] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] Acquiring lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.154692] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] Acquired lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.154866] env[62820]: DEBUG nova.network.neutron [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1470.291537] env[62820]: DEBUG nova.network.neutron [-] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.304120] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.427810] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1470.453545] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695547, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.466725] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='72727140-45c5-4368-9f13-c12a8d0ec9dc',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2056238791',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1470.467030] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1470.467168] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 
tempest-MigrationsAdminTest-2107799577-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1470.468902] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1470.469368] env[62820]: DEBUG nova.virt.hardware [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1470.474638] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfiguring VM instance instance-00000023 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1470.478065] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b41faf9-a62d-4bb0-9b3b-f7e07f69ff23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.491536] env[62820]: DEBUG oslo_concurrency.lockutils [req-0cb3daa8-c8f1-46e6-859f-bdc498010d3b req-cedc8fb0-bed0-482c-8a4b-213398762e96 service nova] Releasing lock "refresh_cache-b6c58867-914e-4e6e-8092-fc8991dc87f7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.502259] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1470.502259] env[62820]: value = "task-1695548" [ 1470.502259] env[62820]: _type = "Task" [ 1470.502259] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.516901] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695548, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.634753] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27586a2f-8c93-4d56-b0ed-0f42cdb99891 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.643548] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca99a62-1dbb-4164-98c8-0fe2c4f2f65c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.680545] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c7abf9-2424-402f-964b-36aa4573bf8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.689581] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c78526-8c59-4d2f-9fc9-fff94e2fd3e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.705655] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.780464] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d567a8c6-d83e-4069-94fd-0f80c7e5839d tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.348s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.801962] env[62820]: INFO nova.compute.manager [-] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Took 1.57 seconds to deallocate network for instance. [ 1470.812772] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.954309] env[62820]: DEBUG oslo_vmware.api [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.638566} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.954933] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1470.954933] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1470.955139] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1470.955370] env[62820]: INFO nova.compute.manager [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1470.955684] env[62820]: DEBUG oslo.service.loopingcall [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.955933] env[62820]: DEBUG nova.compute.manager [-] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1470.956071] env[62820]: DEBUG nova.network.neutron [-] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1470.968434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.012903] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695548, 'name': ReconfigVM_Task, 'duration_secs': 0.293009} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.013203] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfigured VM instance instance-00000023 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1471.014016] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfec6002-0329-4716-8fc9-693c2f03c613 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.039548] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1471.042018] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c94cd970-4643-4365-8caa-b89857521b44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.063065] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1471.063065] env[62820]: value = "task-1695549" [ 1471.063065] env[62820]: _type = "Task" [ 1471.063065] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.072095] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.153882] env[62820]: DEBUG nova.network.neutron [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updated VIF entry in instance network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1471.154377] env[62820]: DEBUG nova.network.neutron [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.210226] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1471.306527] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.310959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.357048] env[62820]: DEBUG nova.compute.manager [req-58461584-242a-4e4e-bdcd-47138dde7c80 req-3f5895d9-cf2b-4277-b3cf-1d9ba9399bc2 service nova] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Received event network-vif-deleted-ac0c5d29-710c-41c1-9d17-a8a15f59cb82 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1471.574181] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695549, 'name': ReconfigVM_Task, 'duration_secs': 0.286237} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.574479] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfigured VM instance instance-00000023 to attach disk [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1471.574753] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1471.657605] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e62fa25-d4c3-42ef-99ca-03f7837ec88d req-e7953b6c-c59f-4b8d-a813-1861238d132d service nova] Releasing lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.716274] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1471.716274] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.156s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.716274] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.900s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.716274] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.718741] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.444s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.719254] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.723185] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.157s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.723634] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.728255] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.573s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.729962] env[62820]: INFO nova.compute.claims [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.759157] env[62820]: INFO nova.scheduler.client.report [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Deleted allocations for instance 90ea0c16-739a-4132-ac36-e154a846b9c2 [ 1471.771118] env[62820]: INFO nova.scheduler.client.report [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Deleted allocations for instance 
31639194-b0c4-4eb9-a6f4-e61b067c807f [ 1471.808845] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.876454] env[62820]: DEBUG nova.network.neutron [-] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.993200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "7e4596bf-a8b0-4502-b80b-da372d1fba06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.993814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.085385] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7057209-e45c-42c9-bfd5-030d41d3afd7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.118280] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0758a1-ff74-41fa-9173-7b09956f5329 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.138899] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1472.271052] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8de11ad7-f912-4a53-872f-941f0f3a51e7 tempest-ServerActionsV293TestJSON-866657218 tempest-ServerActionsV293TestJSON-866657218-project-member] Lock "4e4668ed-801a-4105-8b9e-cf37be91c8b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.100s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.272651] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68b728f9-1a89-480d-90b2-83a972534c8f tempest-VolumesAssistedSnapshotsTest-602859126 tempest-VolumesAssistedSnapshotsTest-602859126-project-member] Lock "90ea0c16-739a-4132-ac36-e154a846b9c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.399s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.279122] env[62820]: DEBUG 
oslo_concurrency.lockutils [None req-a03e6d6b-5647-4624-8a54-9fbb36776f5d tempest-ServerExternalEventsTest-1210725869 tempest-ServerExternalEventsTest-1210725869-project-member] Lock "31639194-b0c4-4eb9-a6f4-e61b067c807f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.111s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.306430] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.315674] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.315674] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.379022] env[62820]: INFO nova.compute.manager [-] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Took 1.42 seconds to deallocate network for instance. [ 1472.496550] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1472.700549] env[62820]: DEBUG nova.network.neutron [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Port 5af0a5c5-a176-477e-b59a-fa82e9eea9a7 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1472.812273] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.832194] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.833583] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1472.887339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.020334] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.219282] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d404da-f1a7-47b9-a786-08481a9c9aa1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.227960] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fadb1c5-793c-4de1-9ccb-778c5a9ffe8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.276287] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a7a4db-c2df-40d8-a0b2-b40f553fa977 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.285962] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c694cb4-68b0-47f2-a1f7-0196d3f5bdb5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.301975] env[62820]: DEBUG nova.compute.provider_tree [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1473.313419] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.549243] env[62820]: DEBUG nova.compute.manager [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Received event network-vif-deleted-c83c67d0-648f-4a10-b8a2-7e83e079d0f3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1473.549688] env[62820]: DEBUG nova.compute.manager [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1473.549688] env[62820]: DEBUG nova.compute.manager [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing instance network info cache due to event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1473.549803] env[62820]: DEBUG oslo_concurrency.lockutils [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] Acquiring lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.549932] env[62820]: DEBUG oslo_concurrency.lockutils [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] Acquired lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.551151] env[62820]: DEBUG nova.network.neutron [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1473.567208] env[62820]: DEBUG nova.compute.manager [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Received event network-changed-243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1473.567402] env[62820]: DEBUG nova.compute.manager [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Refreshing instance network info cache due to event network-changed-243136d0-94ab-4229-ba69-f9a74d65fcc0. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1473.567624] env[62820]: DEBUG oslo_concurrency.lockutils [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] Acquiring lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.567770] env[62820]: DEBUG oslo_concurrency.lockutils [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] Acquired lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.568331] env[62820]: DEBUG nova.network.neutron [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Refreshing network info cache for port 243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1473.728128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.728936] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.729303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.823848] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.834630] env[62820]: ERROR nova.scheduler.client.report [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [req-fb33f4ae-928b-473d-ba92-db171468a4d5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fb33f4ae-928b-473d-ba92-db171468a4d5"}]} [ 1473.855211] env[62820]: DEBUG nova.scheduler.client.report [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1473.876070] env[62820]: DEBUG nova.scheduler.client.report [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1473.877389] env[62820]: DEBUG nova.compute.provider_tree [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1473.890370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.890370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.891125] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1473.894141] env[62820]: DEBUG nova.scheduler.client.report [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1473.920772] env[62820]: DEBUG nova.scheduler.client.report [None 
req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1474.313967] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.360077] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e47a386-a439-4d44-b7b1-bedfea0c4567 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.370167] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd57db54-54eb-45f2-9190-2b0afdbd4d35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.400127] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "498236b7-3688-4ab1-a604-a9737ba058e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.400436] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.400704] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.400933] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.401200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.405207] env[62820]: INFO nova.compute.manager [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Terminating instance [ 1474.407264] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d839bfa-ebd5-439d-9a2f-22ccfc0a3844 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.417897] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b5f49e-e9ca-4e7b-8f88-0495eb60f747 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.438813] env[62820]: DEBUG nova.compute.provider_tree [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1474.810689] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.810689] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.811077] env[62820]: DEBUG nova.network.neutron [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.820761] env[62820]: DEBUG oslo_vmware.api [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695543, 'name': ReconfigVM_Task, 'duration_secs': 5.833594} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.820761] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.820921] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Reconfigured VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1474.853282] env[62820]: DEBUG nova.network.neutron [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updated VIF entry in instance network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1474.853673] env[62820]: DEBUG nova.network.neutron [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.892408] env[62820]: DEBUG nova.network.neutron [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updated VIF entry in instance network info cache for port 243136d0-94ab-4229-ba69-f9a74d65fcc0. 
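Entries like "Received event network-changed-<port>" followed by "Refreshing network info cache for port ..." and "Updated VIF entry in instance network info cache" show the compute service reacting to Neutron notifications: it re-reads the affected port and rewrites that VIF's entry in the per-instance cache while holding the refresh_cache-<instance-uuid> lock. The sketch below is purely illustrative of that shape (hypothetical names and a plain dict cache, not Nova's _get_instance_nw_info):

```python
import threading

_cache = {}        # instance_uuid -> list of VIF dicts
_locks = {}        # instance_uuid -> lock guarding that instance's cache entry
_locks_guard = threading.Lock()

def _lock_for(instance_uuid):
    with _locks_guard:
        return _locks.setdefault(instance_uuid, threading.Lock())

def handle_network_changed(instance_uuid, port_id, fetch_port):
    """On a network-changed event, refresh only the affected VIF entry.

    fetch_port is a caller-supplied callable returning the current port
    view from Neutron."""
    with _lock_for(instance_uuid):       # mirrors the refresh_cache-<uuid> lock
        vifs = _cache.setdefault(instance_uuid, [])
        fresh = fetch_port(port_id)
        for i, vif in enumerate(vifs):
            if vif["id"] == port_id:
                vifs[i] = fresh          # "Updated VIF entry ... for port ..."
                break
        else:
            vifs.append(fresh)
```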
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1474.893263] env[62820]: DEBUG nova.network.neutron [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [{"id": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "address": "fa:16:3e:ca:13:89", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap243136d0-94", "ovs_interfaceid": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.913037] env[62820]: DEBUG nova.compute.manager [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1474.913261] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1474.914401] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70a2f90-037c-489b-9192-1c358b3ab13f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.931344] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1474.931344] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-938b17d0-3596-4984-9413-ac9721df3d67 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.946601] env[62820]: DEBUG oslo_vmware.api [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1474.946601] env[62820]: value = "task-1695550" [ 1474.946601] env[62820]: _type = "Task" [ 1474.946601] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.964877] env[62820]: DEBUG oslo_vmware.api [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695550, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.997089] env[62820]: DEBUG nova.scheduler.client.report [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1474.997380] env[62820]: DEBUG nova.compute.provider_tree [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 72 to 73 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1474.997572] env[62820]: DEBUG nova.compute.provider_tree [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1475.305885] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Updating instance_info_cache with network_info: [{"id": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "address": "fa:16:3e:ca:e2:f6", "network": {"id": "7d2a19b1-b027-476f-a7e9-83d5e073c47c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1380728919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b9015dc7894a1d98bf0bb73bdf7636", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a487d2d-4f", "ovs_interfaceid": "4a487d2d-4f2d-43bd-9691-dd7219d7b997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.357697] env[62820]: 
DEBUG oslo_concurrency.lockutils [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] Releasing lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.358051] env[62820]: DEBUG nova.compute.manager [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Received event network-changed-243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1475.358216] env[62820]: DEBUG nova.compute.manager [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Refreshing instance network info cache due to event network-changed-243136d0-94ab-4229-ba69-f9a74d65fcc0. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1475.358418] env[62820]: DEBUG oslo_concurrency.lockutils [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] Acquiring lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.396253] env[62820]: DEBUG oslo_concurrency.lockutils [req-91634547-3662-4f3d-8320-6b51b329d426 req-a10cd655-957b-4203-bca4-cb111a4a8ae4 service nova] Releasing lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.396739] env[62820]: DEBUG oslo_concurrency.lockutils [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] Acquired lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.396967] env[62820]: DEBUG nova.network.neutron [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Refreshing network info cache for port 243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1475.460705] env[62820]: DEBUG oslo_vmware.api [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695550, 'name': PowerOffVM_Task, 'duration_secs': 0.243391} completed successfully. 
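The "Task: {... PowerOffVM_Task} progress is N%" lines and the final "completed successfully" entry with a duration_secs value come from oslo.vmware's task polling (wait_for_task / _poll_task): vCenter operations return a task object that is polled until it reports success or error. A generic version of that loop, with get_task_info standing in for the real session call (an assumed helper, not the oslo.vmware API):

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it leaves the running states.

    get_task_info(task_id) is assumed to return a dict with 'state',
    'progress' and an optional 'error' key."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # queued / running: report progress and poll again, like _poll_task above
        print("Task %s progress is %s%%" % (task_id, info.get("progress", 0)))
        time.sleep(poll_interval)
    raise TaskFailed("timed out waiting for task %s" % task_id)
```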
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.461662] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1475.461914] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1475.462257] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82c58c57-9bc7-4227-b2ad-fdc4258016f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.503806] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.775s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.504263] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Start building networks asynchronously for instance. 
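The Acquiring/acquired/released triplets with waited and held times, such as "compute_resources" held for 3.775 s here and waits of roughly 30 s a few entries later, are emitted by oslo.concurrency's lockutils whenever code runs inside a named lock, for example the resource tracker's claim path. A minimal usage sketch of that public API (the DEBUG timing lines in the log come from the library's own logging, not from the caller):

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources', fair=True)
def instance_claim(instance):
    # Everything here runs with the named lock held; concurrent callers
    # queue up, which is what produces the long "waited NN.NNNs" entries.
    return {'instance': instance, 'claimed': True}

# A context-manager form is also available for ad-hoc sections,
# e.g. around a per-instance cache refresh (placeholder lock name):
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass
```

With fair=True, waiters acquire the lock in FIFO order, which keeps long-waiting builds from being starved when many requests contend for the same lock.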
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1475.506974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.227s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.507191] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.512332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.853s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.512332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.512332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.077s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.513423] env[62820]: INFO nova.compute.claims [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1475.545184] env[62820]: INFO nova.scheduler.client.report [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Deleted allocations for instance 262d0714-d7d7-443c-9927-ef03ba9f230e [ 1475.552830] env[62820]: INFO nova.scheduler.client.report [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Deleted allocations for instance 9910a0ea-5ce0-41e9-b449-da729a4c3223 [ 1475.567567] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1475.567567] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 
tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1475.567567] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Deleting the datastore file [datastore1] 498236b7-3688-4ab1-a604-a9737ba058e8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1475.567567] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbfcf8ac-f2e0-4e57-bb6b-becb2813fbb6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.575856] env[62820]: DEBUG oslo_vmware.api [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1475.575856] env[62820]: value = "task-1695552" [ 1475.575856] env[62820]: _type = "Task" [ 1475.575856] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.593400] env[62820]: DEBUG oslo_vmware.api [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.677389] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "207efed9-20ea-4b9e-bca2-45521b41de6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.677836] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.808681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.808987] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1475.810614] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task 
ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.810949] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.811241] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.811368] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.812966] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.812966] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.812966] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
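The run of "Running periodic task ComputeManager._poll_*" lines, and the _reclaim_queued_deletes entry that skips because CONF.reclaim_instance_interval <= 0, come from oslo.service's periodic task machinery: methods decorated as periodic tasks are collected on a manager class and invoked on a timer, and each task may no-op based on configuration. A stripped-down sketch of that pattern (spacing and option defaults here are arbitrary, not Nova's):

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

class MiniManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _poll_unconfirmed_resizes(self, context):
        print("polling unconfirmed resizes")

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        if CONF.reclaim_instance_interval <= 0:
            return  # same condition as the "skipping..." line above
        print("reclaiming soft-deleted instances")

# MiniManager().run_periodic_tasks(None) would invoke whichever tasks are due;
# in the service this is driven by a looping timer, which is the
# run_periodic_tasks frame shown in each log entry.
```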
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1475.812966] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.820524] env[62820]: DEBUG nova.network.neutron [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.022808] env[62820]: DEBUG nova.compute.utils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1476.024732] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1476.025157] env[62820]: DEBUG nova.network.neutron [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1476.057393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6c120efc-8aad-4614-bc30-76f0bc08214c tempest-ServersTestMultiNic-62380988 tempest-ServersTestMultiNic-62380988-project-member] Lock "262d0714-d7d7-443c-9927-ef03ba9f230e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.095s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.066995] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1d06f3f-f04e-494f-a4f2-eb6ecbbbeef9 tempest-ListImageFiltersTestJSON-1225692901 tempest-ListImageFiltersTestJSON-1225692901-project-member] Lock "9910a0ea-5ce0-41e9-b449-da729a4c3223" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.442s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.071499] env[62820]: DEBUG nova.compute.manager [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-vif-deleted-2e2c8225-56f6-4223-a6ea-721acb71e411 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1476.071620] env[62820]: INFO nova.compute.manager [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Neutron deleted interface 2e2c8225-56f6-4223-a6ea-721acb71e411; detaching it from the instance and deleting it from the info cache [ 1476.072199] env[62820]: DEBUG nova.network.neutron [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.090953] env[62820]: DEBUG oslo_vmware.api [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157919} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.091403] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1476.091617] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1476.091823] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1476.091978] env[62820]: INFO nova.compute.manager [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1476.092234] env[62820]: DEBUG oslo.service.loopingcall [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
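The terminate path for instance 498236b7 above runs in a fixed order: power off the VM, unregister it, delete its directory from datastore1, then hand the Neutron ports back, with the network teardown wrapped in a retrying looping call (_deallocate_network_with_retries). A high-level ordering sketch with caller-supplied stand-ins for each step; this is illustrative only, not the vmwareapi driver code:

```python
import time

def destroy_instance(power_off, unregister, delete_datastore_dir,
                     deallocate_network, retries=3, delay=2):
    """Tear an instance down in the same order as the log above.

    All four arguments are caller-supplied callables; a real implementation
    would talk to vCenter and Neutron instead of these stand-ins."""
    power_off()              # PowerOffVM_Task
    unregister()             # UnregisterVM
    delete_datastore_dir()   # DeleteDatastoreFile_Task on [datastore1] <uuid>
    for attempt in range(1, retries + 1):
        try:
            deallocate_network()   # deallocate_for_instance()
            return
        except Exception:
            if attempt == retries:
                raise
            time.sleep(delay)      # retried via a looping call in the real service
```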
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.093751] env[62820]: DEBUG nova.policy [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81a169ac4144a5bbc0a4e3a077cb4a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65abf73e789b48d3ba24e2660d7c0341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1476.095559] env[62820]: DEBUG nova.compute.manager [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1476.095649] env[62820]: DEBUG nova.network.neutron [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1476.181171] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1476.214016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.214199] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.214427] env[62820]: DEBUG nova.network.neutron [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1476.315975] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.323728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.441662] env[62820]: DEBUG 
nova.network.neutron [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updated VIF entry in instance network info cache for port 243136d0-94ab-4229-ba69-f9a74d65fcc0. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1476.442162] env[62820]: DEBUG nova.network.neutron [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [{"id": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "address": "fa:16:3e:ca:13:89", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap243136d0-94", "ovs_interfaceid": "243136d0-94ab-4229-ba69-f9a74d65fcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.499392] env[62820]: DEBUG nova.network.neutron [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Successfully created port: d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1476.525862] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Start building block device mappings for instance. 
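The "Policy check for network:attach_external_network failed with credentials {...}" entry above is a non-fatal probe: the request token only carries the member and reader roles, and attaching external networks is admin-only by default, so the check evaluates to False and the port is simply created without that capability. A small oslo.policy sketch of this kind of soft check (the rule string is an assumption, not Nova's exact default):

```python
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
enforcer = policy.Enforcer(CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '65abf73e789b48d3ba24e2660d7c0341'}

# do_raise=False turns a denial into a False return instead of an exception,
# which is how a soft probe like the one in the log behaves.
allowed = enforcer.authorize('network:attach_external_network', {}, creds,
                             do_raise=False)
print(allowed)  # -> False for a member-only token
```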
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1476.579351] env[62820]: DEBUG oslo_concurrency.lockutils [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.579568] env[62820]: DEBUG oslo_concurrency.lockutils [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] Acquired lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.581112] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0a09b1-4fb4-4466-a69f-63d5d75c1fc4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.609302] env[62820]: DEBUG oslo_concurrency.lockutils [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] Releasing lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.609429] env[62820]: WARNING nova.compute.manager [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Detach interface failed, port_id=2e2c8225-56f6-4223-a6ea-721acb71e411, reason: No device with interface-id 2e2c8225-56f6-4223-a6ea-721acb71e411 exists on VM: nova.exception.NotFound: No device with interface-id 2e2c8225-56f6-4223-a6ea-721acb71e411 exists on VM [ 1476.609626] env[62820]: DEBUG nova.compute.manager [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1476.609796] env[62820]: DEBUG nova.compute.manager [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing instance network info cache due to event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1476.610035] env[62820]: DEBUG oslo_concurrency.lockutils [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] Acquiring lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.610174] env[62820]: DEBUG oslo_concurrency.lockutils [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] Acquired lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.610334] env[62820]: DEBUG nova.network.neutron [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.707740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.765582] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.765974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.766368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.766573] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.766758] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.769546] env[62820]: INFO nova.compute.manager [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Terminating instance [ 1476.846326] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00929e30-7621-48a7-a80d-aa2787086153 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.871659] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1737495-a0eb-4490-8fb3-6f3daf3e27c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.879490] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1476.945537] env[62820]: DEBUG oslo_concurrency.lockutils [req-06502b7f-ba26-4a5f-8324-f681fc34edd7 req-1fc7656b-e099-47e6-9f5d-5cd5a136b5ad service nova] Releasing lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.968037] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c72c25b-c173-49e8-9511-6da8b7ac9db6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.976101] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a5854e-887d-404f-9bd4-6e25a44cd28c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.009531] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6c0aaa-b1d7-4c94-aa8d-7a3d5ff51e89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.018404] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46218bb9-af86-4ca4-b25e-c62b40fd6e96 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.043068] env[62820]: DEBUG nova.compute.provider_tree [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.274283] env[62820]: DEBUG nova.compute.manager [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1477.274639] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.275535] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d01f5f7-53df-41f1-83b4-3b74fd38e115 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.288960] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.289243] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51233a2e-de19-44cc-a91b-bfc467b3c0b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.298382] env[62820]: DEBUG oslo_vmware.api [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1477.298382] env[62820]: value = "task-1695553" [ 1477.298382] env[62820]: _type = "Task" [ 1477.298382] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.309456] env[62820]: DEBUG oslo_vmware.api [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695553, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.387466] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1477.387776] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c916b4ff-43de-4dd9-ae9f-1a471cf5b89a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.401759] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1477.401759] env[62820]: value = "task-1695554" [ 1477.401759] env[62820]: _type = "Task" [ 1477.401759] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.417061] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695554, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.546501] env[62820]: DEBUG nova.scheduler.client.report [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1477.554251] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1477.560437] env[62820]: DEBUG nova.network.neutron [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [{"id": "e5e97928-d469-42c4-9621-ed449eeebf5c", "address": "fa:16:3e:ec:4d:db", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5e97928-d4", "ovs_interfaceid": "e5e97928-d469-42c4-9621-ed449eeebf5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.586877] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1477.586877] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1477.586877] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.586877] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None 
req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1477.589222] env[62820]: DEBUG nova.virt.hardware [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1477.591393] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7759f60f-e3e9-4660-8fb3-fd44ffe41f7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.603426] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ceb4c6-3919-41b6-82a9-0d83271235fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.619041] env[62820]: DEBUG nova.network.neutron [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.644115] env[62820]: DEBUG nova.network.neutron [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updated VIF entry in instance network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.644632] env[62820]: DEBUG nova.network.neutron [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.817783] env[62820]: DEBUG oslo_vmware.api [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695553, 'name': PowerOffVM_Task, 'duration_secs': 0.263717} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.818260] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1477.818586] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1477.818994] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36b05156-2d2e-4830-9a40-a53be15da219 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.903388] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1477.903388] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1477.903710] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleting the datastore file [datastore1] bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.911390] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95412dc1-bded-4ed1-922b-f1e6de0497f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.922892] env[62820]: DEBUG oslo_vmware.api [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695554, 'name': PowerOnVM_Task, 'duration_secs': 0.420945} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.924742] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1477.924968] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c0028-e034-4c30-a20e-bf55fbc0f75b tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance 'ab21fd61-3a44-42fa-92be-51214b0a9a1e' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1477.929662] env[62820]: DEBUG oslo_vmware.api [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1477.929662] env[62820]: value = "task-1695556" [ 1477.929662] env[62820]: _type = "Task" [ 1477.929662] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.932348] env[62820]: DEBUG nova.compute.manager [req-f8f979fc-e6a1-4587-b3f0-8a0663b2622d req-6ae87c8c-6fee-4b55-93ac-05dae636c44b service nova] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Received event network-vif-deleted-243136d0-94ab-4229-ba69-f9a74d65fcc0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1477.943440] env[62820]: DEBUG oslo_vmware.api [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695556, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.057558] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.058835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.610s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.060767] env[62820]: INFO nova.compute.claims [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1478.068032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.121422] env[62820]: INFO nova.compute.manager [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Took 2.03 seconds to deallocate network for instance. [ 1478.147696] env[62820]: DEBUG oslo_concurrency.lockutils [req-c7cca348-cbd2-4999-b840-2eb02503440e req-e76dbbfc-5ccb-45b9-b920-38ebab2c2c74 service nova] Releasing lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.328435] env[62820]: DEBUG nova.compute.manager [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1478.329598] env[62820]: DEBUG nova.compute.manager [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing instance network info cache due to event network-changed-927b7951-0ef5-4aa5-b888-5b73266b6951. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1478.329598] env[62820]: DEBUG oslo_concurrency.lockutils [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] Acquiring lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.329598] env[62820]: DEBUG oslo_concurrency.lockutils [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] Acquired lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.329598] env[62820]: DEBUG nova.network.neutron [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Refreshing network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.449288] env[62820]: DEBUG oslo_vmware.api [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203573} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.450298] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.451067] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.451067] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.451304] env[62820]: INFO nova.compute.manager [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1478.451661] env[62820]: DEBUG oslo.service.loopingcall [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.452434] env[62820]: DEBUG nova.compute.manager [-] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1478.452656] env[62820]: DEBUG nova.network.neutron [-] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.567568] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "cf7fe22f-18ef-4e51-9fae-9b096fba6683" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.567568] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "cf7fe22f-18ef-4e51-9fae-9b096fba6683" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.573542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0200a890-c26b-4cce-b573-0b626326859d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c-2e2c8225-56f6-4223-a6ea-721acb71e411" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.387s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.589025] env[62820]: DEBUG nova.network.neutron [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Successfully updated port: d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1478.631051] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.811314] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "498236b7-3688-4ab1-a604-a9737ba058e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.073495] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "cf7fe22f-18ef-4e51-9fae-9b096fba6683" "released" by 
"nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.074134] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1479.091797] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-09ab63ae-fd36-4915-8c59-9d9bc5833288" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.091797] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-09ab63ae-fd36-4915-8c59-9d9bc5833288" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.091797] env[62820]: DEBUG nova.network.neutron [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1479.502009] env[62820]: DEBUG nova.network.neutron [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updated VIF entry in instance network info cache for port 927b7951-0ef5-4aa5-b888-5b73266b6951. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.502415] env[62820]: DEBUG nova.network.neutron [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [{"id": "927b7951-0ef5-4aa5-b888-5b73266b6951", "address": "fa:16:3e:df:db:ef", "network": {"id": "39da12cc-47f2-4ffa-8e7f-cb0b10979144", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1266699781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bfd16891a3f453da8583d65051a2afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap927b7951-0e", "ovs_interfaceid": "927b7951-0ef5-4aa5-b888-5b73266b6951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.553649] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac414d22-093e-4169-8724-81eef0d80971 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.563401] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0717e51a-33d3-4049-b964-15712375b3c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.596333] env[62820]: DEBUG nova.compute.utils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1479.600180] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1479.600373] env[62820]: DEBUG nova.network.neutron [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1479.602367] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e7fd58-c7bf-4634-8be8-8bfa55faea35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.613622] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebed40e-1666-4e41-95b4-6a16df0af99b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.629980] env[62820]: DEBUG nova.compute.provider_tree [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.667077] env[62820]: DEBUG nova.network.neutron [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1479.747464] env[62820]: DEBUG nova.network.neutron [-] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.907277] env[62820]: DEBUG nova.policy [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f1637bb20d24804a06b8bdcf3e76fbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29c5fc13025a4c8c8c6a0b2624238de6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1479.952975] env[62820]: DEBUG nova.network.neutron [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Updating instance_info_cache with network_info: [{"id": "d056da46-0da8-4d3b-b8a9-0255f08e1a3b", "address": "fa:16:3e:a4:a7:8d", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd056da46-0d", "ovs_interfaceid": "d056da46-0da8-4d3b-b8a9-0255f08e1a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.005126] env[62820]: DEBUG oslo_concurrency.lockutils [req-f304ecb4-ebda-47e9-bfe6-ba84b4d1cb60 req-6a722cfd-6e67-4983-ae8f-aba736871b41 service nova] Releasing lock "refresh_cache-aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.103279] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1480.121754] env[62820]: DEBUG nova.compute.manager [req-545218e4-98d6-416a-9b13-83da0ad16602 req-4031ba22-5efb-4853-b318-6fe16180165f service nova] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Received event network-vif-deleted-e5e97928-d469-42c4-9621-ed449eeebf5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1480.132799] env[62820]: DEBUG nova.scheduler.client.report [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1480.251490] env[62820]: INFO nova.compute.manager [-] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Took 1.80 seconds to deallocate network for instance. 
[ 1480.336885] env[62820]: DEBUG nova.network.neutron [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Port 5af0a5c5-a176-477e-b59a-fa82e9eea9a7 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1480.337182] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.338058] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.338250] env[62820]: DEBUG nova.network.neutron [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1480.357714] env[62820]: DEBUG nova.compute.manager [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Received event network-vif-plugged-d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1480.357714] env[62820]: DEBUG oslo_concurrency.lockutils [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] Acquiring lock "09ab63ae-fd36-4915-8c59-9d9bc5833288-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.357714] env[62820]: DEBUG oslo_concurrency.lockutils [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.357714] env[62820]: DEBUG oslo_concurrency.lockutils [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.357714] env[62820]: DEBUG nova.compute.manager [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] No waiting events found dispatching network-vif-plugged-d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1480.357714] env[62820]: WARNING nova.compute.manager 
[req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Received unexpected event network-vif-plugged-d056da46-0da8-4d3b-b8a9-0255f08e1a3b for instance with vm_state building and task_state spawning. [ 1480.357714] env[62820]: DEBUG nova.compute.manager [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Received event network-changed-d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1480.357714] env[62820]: DEBUG nova.compute.manager [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Refreshing instance network info cache due to event network-changed-d056da46-0da8-4d3b-b8a9-0255f08e1a3b. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1480.357714] env[62820]: DEBUG oslo_concurrency.lockutils [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] Acquiring lock "refresh_cache-09ab63ae-fd36-4915-8c59-9d9bc5833288" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.458851] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-09ab63ae-fd36-4915-8c59-9d9bc5833288" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.458851] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Instance network_info: |[{"id": "d056da46-0da8-4d3b-b8a9-0255f08e1a3b", "address": "fa:16:3e:a4:a7:8d", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd056da46-0d", "ovs_interfaceid": "d056da46-0da8-4d3b-b8a9-0255f08e1a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1480.458851] env[62820]: DEBUG oslo_concurrency.lockutils [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] Acquired lock "refresh_cache-09ab63ae-fd36-4915-8c59-9d9bc5833288" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.458851] env[62820]: DEBUG nova.network.neutron [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Refreshing network info cache for port d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1480.459940] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:a7:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd056da46-0da8-4d3b-b8a9-0255f08e1a3b', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1480.475442] env[62820]: DEBUG oslo.service.loopingcall [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.476055] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1480.477471] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-938c938a-ff13-459c-8b7e-9d2ce83aea89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.509476] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1480.509476] env[62820]: value = "task-1695557" [ 1480.509476] env[62820]: _type = "Task" [ 1480.509476] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.518694] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.561585] env[62820]: DEBUG nova.network.neutron [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Successfully created port: 917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1480.643591] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.643591] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1480.647286] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.194s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.648667] env[62820]: INFO nova.compute.claims [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.762580] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.026539] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695557, 'name': CreateVM_Task, 'duration_secs': 0.506407} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.028025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1481.028304] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.031060] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.031060] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1481.031060] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9551d1f1-b258-4efe-bc80-adee60d72222 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.035860] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1481.035860] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52307fe2-9ea9-dd01-9d34-3f476e548a0e" [ 1481.035860] env[62820]: _type = "Task" [ 1481.035860] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.044670] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52307fe2-9ea9-dd01-9d34-3f476e548a0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.117819] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1481.150736] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1481.150996] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1481.151176] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1481.151369] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1481.151521] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1481.151697] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1481.151924] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1481.152104] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1481.152281] env[62820]: DEBUG nova.virt.hardware [None 
req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1481.152542] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1481.152730] env[62820]: DEBUG nova.virt.hardware [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1481.154364] env[62820]: DEBUG nova.compute.utils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1481.159223] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab99820c-665c-4b8c-80bc-6c3c43ced9bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.166482] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1481.166560] env[62820]: DEBUG nova.network.neutron [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1481.175869] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a662b784-8c65-447c-bcb3-1b9c079ee883 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.283477] env[62820]: DEBUG nova.policy [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8471663f47dd4c06a3ccc6fa790d1003', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '552a99ce13514c8fbe1858ce4d6cd1f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1481.362249] env[62820]: DEBUG nova.network.neutron [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.545946] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52307fe2-9ea9-dd01-9d34-3f476e548a0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.585987] env[62820]: DEBUG nova.network.neutron [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Updated VIF entry in instance network info cache for port d056da46-0da8-4d3b-b8a9-0255f08e1a3b. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1481.586393] env[62820]: DEBUG nova.network.neutron [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Updating instance_info_cache with network_info: [{"id": "d056da46-0da8-4d3b-b8a9-0255f08e1a3b", "address": "fa:16:3e:a4:a7:8d", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd056da46-0d", "ovs_interfaceid": "d056da46-0da8-4d3b-b8a9-0255f08e1a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.669659] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1481.865913] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.973286] env[62820]: DEBUG nova.network.neutron [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Successfully created port: cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1482.052118] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52307fe2-9ea9-dd01-9d34-3f476e548a0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.090556] env[62820]: DEBUG oslo_concurrency.lockutils [req-b01f3899-4ccd-4048-91cd-6fa262051dee req-ef3a4e6e-e0b4-44c7-a64a-f492f57645f3 service nova] Releasing lock "refresh_cache-09ab63ae-fd36-4915-8c59-9d9bc5833288" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.150388] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97aa0c23-0c3d-4226-9d38-a50ae8028a01 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.161273] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2932162f-4c38-4a62-9b11-9ce985f95f40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.201388] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0391517-57ba-4f95-b3e2-5d1fb1f4438d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.210890] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1acfa1-9be6-4fae-87a6-996d8544abdb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.235505] env[62820]: DEBUG nova.compute.provider_tree [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.368864] env[62820]: DEBUG nova.compute.manager [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62820) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1482.369130] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.550942] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52307fe2-9ea9-dd01-9d34-3f476e548a0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.669041] env[62820]: DEBUG nova.compute.manager [req-4238885f-e7fe-4014-88d5-6af56a897e6d req-9730613b-9c89-4bc2-b6db-7809d1ca468e service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Received event network-vif-plugged-917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1482.669283] env[62820]: DEBUG oslo_concurrency.lockutils [req-4238885f-e7fe-4014-88d5-6af56a897e6d req-9730613b-9c89-4bc2-b6db-7809d1ca468e service nova] Acquiring lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.669490] env[62820]: DEBUG oslo_concurrency.lockutils [req-4238885f-e7fe-4014-88d5-6af56a897e6d req-9730613b-9c89-4bc2-b6db-7809d1ca468e service nova] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.669651] env[62820]: DEBUG oslo_concurrency.lockutils [req-4238885f-e7fe-4014-88d5-6af56a897e6d req-9730613b-9c89-4bc2-b6db-7809d1ca468e service nova] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.669821] env[62820]: DEBUG nova.compute.manager [req-4238885f-e7fe-4014-88d5-6af56a897e6d req-9730613b-9c89-4bc2-b6db-7809d1ca468e service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] No waiting events found dispatching network-vif-plugged-917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1482.669980] env[62820]: WARNING nova.compute.manager [req-4238885f-e7fe-4014-88d5-6af56a897e6d req-9730613b-9c89-4bc2-b6db-7809d1ca468e service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Received unexpected event network-vif-plugged-917cc76e-f48e-4b85-8eb5-554dffb05814 for instance with vm_state building and task_state spawning. [ 1482.702034] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1482.740810] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1482.742020] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1482.742696] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1482.742786] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1482.742951] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1482.743122] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1482.743376] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1482.743548] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1482.743781] env[62820]: DEBUG nova.virt.hardware [None 
req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1482.743994] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1482.744213] env[62820]: DEBUG nova.virt.hardware [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1482.747444] env[62820]: DEBUG nova.scheduler.client.report [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1482.755457] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd8546b-6c81-4172-8b0e-e361ef41eeb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.766282] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97662736-2452-4b79-99de-99c3de3acda4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.786169] env[62820]: DEBUG nova.network.neutron [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Successfully updated port: 917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1483.049277] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52307fe2-9ea9-dd01-9d34-3f476e548a0e, 'name': SearchDatastore_Task, 'duration_secs': 1.686072} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.050110] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.050110] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1483.050496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.050496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.050701] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1483.050891] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e25d3e3b-0fbd-49ff-b7c4-1a84f3f7a607 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.061219] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1483.061609] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1483.062384] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3163df9f-af27-435a-85ea-0580b628c2e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.071353] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1483.071353] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52291624-02fb-5f11-3734-0f076cb10dfc" [ 1483.071353] env[62820]: _type = "Task" [ 1483.071353] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.090700] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52291624-02fb-5f11-3734-0f076cb10dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.260111] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.260775] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1483.264464] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.252s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.264464] env[62820]: DEBUG nova.objects.instance [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1483.291750] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "refresh_cache-3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.291895] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquired lock "refresh_cache-3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.292073] env[62820]: DEBUG nova.network.neutron [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1483.583221] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52291624-02fb-5f11-3734-0f076cb10dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.017622} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.584061] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-723c1e81-0541-4474-8c6b-d6d90a9b16f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.590544] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1483.590544] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5236bde4-177e-1b03-cd18-20fecf3c8f43" [ 1483.590544] env[62820]: _type = "Task" [ 1483.590544] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.601591] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5236bde4-177e-1b03-cd18-20fecf3c8f43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.769154] env[62820]: DEBUG nova.compute.utils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.775612] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1483.775864] env[62820]: DEBUG nova.network.neutron [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1483.903655] env[62820]: DEBUG nova.network.neutron [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1484.002417] env[62820]: DEBUG nova.policy [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed4cdce03e824de4bb39cc042c40dfd8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee1a42e497e94154ac770cb4c9d0456a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1484.105866] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5236bde4-177e-1b03-cd18-20fecf3c8f43, 'name': SearchDatastore_Task, 'duration_secs': 0.029867} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.105866] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.105866] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 09ab63ae-fd36-4915-8c59-9d9bc5833288/09ab63ae-fd36-4915-8c59-9d9bc5833288.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1484.105866] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b17f80dc-d6ee-48f1-b07e-4b4a64b7e3b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.117981] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1484.117981] env[62820]: value = "task-1695558" [ 1484.117981] env[62820]: _type = "Task" [ 1484.117981] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.128099] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695558, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.276324] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1484.289030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b626189-40ae-4822-bc24-969e2f9cb8ab tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.289030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.605s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.289030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.294176] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.179s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.294613] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.297415] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.440s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.299460] env[62820]: INFO nova.compute.claims [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1484.341417] env[62820]: INFO nova.scheduler.client.report [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Deleted allocations for instance 519c961c-557e-4796-88da-047c55d6be44 [ 1484.346269] env[62820]: INFO nova.scheduler.client.report [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Deleted allocations for instance 0eb62424-0ee6-4ff4-94c2-bb6a10861759 [ 
1484.609941] env[62820]: DEBUG nova.network.neutron [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Updating instance_info_cache with network_info: [{"id": "917cc76e-f48e-4b85-8eb5-554dffb05814", "address": "fa:16:3e:1a:26:cf", "network": {"id": "1fd446c1-45db-4e9e-875a-583467aebcd0", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1414213738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29c5fc13025a4c8c8c6a0b2624238de6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap917cc76e-f4", "ovs_interfaceid": "917cc76e-f48e-4b85-8eb5-554dffb05814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.628040] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695558, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.629709] env[62820]: DEBUG nova.network.neutron [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Successfully updated port: cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1484.720328] env[62820]: DEBUG nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Received event network-changed-917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1484.720533] env[62820]: DEBUG nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Refreshing instance network info cache due to event network-changed-917cc76e-f48e-4b85-8eb5-554dffb05814. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1484.720767] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Acquiring lock "refresh_cache-3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.785944] env[62820]: INFO nova.virt.block_device [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Booting with volume 952365ce-b448-4f14-9fc5-f9b7c5180d39 at /dev/sda [ 1484.832149] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d2ab581-9dd5-471d-87fe-eca62a159b2d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.844403] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55905e59-efe2-4976-b28e-1032b2219345 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.861168] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c991c33-1390-4cc9-b3af-50367bf9154b tempest-ServersTestFqdnHostnames-1922088013 tempest-ServersTestFqdnHostnames-1922088013-project-member] Lock "519c961c-557e-4796-88da-047c55d6be44" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 35.225s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.863193] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a60b483-2b4d-43f6-abfe-0c3dbe0c546e tempest-ServersTestJSON-1010250127 tempest-ServersTestJSON-1010250127-project-member] Lock "0eb62424-0ee6-4ff4-94c2-bb6a10861759" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.076s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.890560] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55a72d52-cbda-4c04-ae45-8260182418c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.900623] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b88d2ce-69f9-4551-9ab5-8ce4245070d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.935367] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58f462f-8353-4a09-b02f-065928ecf6c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.944206] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c93ff19-4aec-43d7-93b5-fe7063f0bb71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.962566] env[62820]: DEBUG nova.virt.block_device [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updating existing 
volume attachment record: e6ac3597-66d3-484f-9384-4a677241353b {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1485.110710] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Releasing lock "refresh_cache-3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.111212] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Instance network_info: |[{"id": "917cc76e-f48e-4b85-8eb5-554dffb05814", "address": "fa:16:3e:1a:26:cf", "network": {"id": "1fd446c1-45db-4e9e-875a-583467aebcd0", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1414213738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29c5fc13025a4c8c8c6a0b2624238de6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap917cc76e-f4", "ovs_interfaceid": "917cc76e-f48e-4b85-8eb5-554dffb05814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1485.111419] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Acquired lock "refresh_cache-3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.111637] env[62820]: DEBUG nova.network.neutron [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Refreshing network info cache for port 917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1485.112973] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:26:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '917cc76e-f48e-4b85-8eb5-554dffb05814', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1485.126398] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 
tempest-ServerGroupTestJSON-1414605718-project-member] Creating folder: Project (29c5fc13025a4c8c8c6a0b2624238de6). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.126889] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94f72f10-fbd8-4063-a284-388ee59a92b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.136567] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "refresh_cache-56c371a9-983f-4d5f-8abf-0183736c374c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.136567] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquired lock "refresh_cache-56c371a9-983f-4d5f-8abf-0183736c374c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.136812] env[62820]: DEBUG nova.network.neutron [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.142095] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.753333} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.142095] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 09ab63ae-fd36-4915-8c59-9d9bc5833288/09ab63ae-fd36-4915-8c59-9d9bc5833288.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1485.146533] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1485.148259] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c19dea6b-6b31-41f8-abbe-59e34eec415c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.150558] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Created folder: Project (29c5fc13025a4c8c8c6a0b2624238de6) in parent group-v353379. 
[ 1485.150756] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Creating folder: Instances. Parent ref: group-v353523. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.151565] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-746377a4-7dfc-4213-ad34-221b93bc5d95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.160277] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1485.160277] env[62820]: value = "task-1695560" [ 1485.160277] env[62820]: _type = "Task" [ 1485.160277] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.169376] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Created folder: Instances in parent group-v353523. [ 1485.169614] env[62820]: DEBUG oslo.service.loopingcall [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.173257] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1485.174443] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695560, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.174443] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b023db5-394a-47d6-9bc3-e3da1ea09477 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.201095] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1485.201095] env[62820]: value = "task-1695562" [ 1485.201095] env[62820]: _type = "Task" [ 1485.201095] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.210921] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695562, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.335407] env[62820]: DEBUG nova.network.neutron [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Successfully created port: cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1485.673859] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080393} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.678074] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1485.678706] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa19709-607d-4d81-bcdb-bf43260d686c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.715700] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 09ab63ae-fd36-4915-8c59-9d9bc5833288/09ab63ae-fd36-4915-8c59-9d9bc5833288.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1485.720057] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61e2c6ff-2e81-4ab3-96aa-93211a1e962f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.746135] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695562, 'name': CreateVM_Task, 'duration_secs': 0.429961} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.748116] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1485.748669] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1485.748669] env[62820]: value = "task-1695563" [ 1485.748669] env[62820]: _type = "Task" [ 1485.748669] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.749385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.749553] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.749913] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1485.752943] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3ec5a9e-d69c-43f4-aa73-45785678ac4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.772813] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1485.772813] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fe42be-35ea-1cc5-2692-0b5ae83e959c" [ 1485.772813] env[62820]: _type = "Task" [ 1485.772813] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.774461] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695563, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.775421] env[62820]: DEBUG nova.network.neutron [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1485.791530] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fe42be-35ea-1cc5-2692-0b5ae83e959c, 'name': SearchDatastore_Task, 'duration_secs': 0.017529} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.791530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.791810] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1485.792050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.792210] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.792399] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1485.792640] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2782c1d4-c093-415b-9835-2c8b516bd1ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.809933] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1485.810180] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1485.812266] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0ae177b-7ced-45fc-a035-289e3598a8f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.817888] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1485.817888] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5239a5d3-b2ca-12f7-ab5b-d541d7843c41" [ 1485.817888] env[62820]: _type = "Task" [ 1485.817888] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.827490] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239a5d3-b2ca-12f7-ab5b-d541d7843c41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.852321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6932a157-040b-40a8-9a0a-3239a377d26b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.860432] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0913658b-caac-4122-b3d3-09835d78e13f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.894730] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62609d5a-e07b-4366-a2c6-ed12aba6de43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.904420] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d9e144-4e9a-4b96-87db-93b966d3f141 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.919989] env[62820]: DEBUG nova.compute.provider_tree [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1486.265139] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695563, 'name': ReconfigVM_Task, 'duration_secs': 0.285847} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.265532] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 09ab63ae-fd36-4915-8c59-9d9bc5833288/09ab63ae-fd36-4915-8c59-9d9bc5833288.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1486.266366] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3deed695-8d1a-42ab-8007-9e91c649c9fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.275155] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1486.275155] env[62820]: value = "task-1695564" [ 1486.275155] env[62820]: _type = "Task" [ 1486.275155] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.287727] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695564, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.330486] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239a5d3-b2ca-12f7-ab5b-d541d7843c41, 'name': SearchDatastore_Task, 'duration_secs': 0.015532} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.331840] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d706b5d9-a931-48b3-88c4-22f243e3c699 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.339430] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1486.339430] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bb6dc8-b7e9-1d16-b360-106192fe4315" [ 1486.339430] env[62820]: _type = "Task" [ 1486.339430] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.353159] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bb6dc8-b7e9-1d16-b360-106192fe4315, 'name': SearchDatastore_Task} progress is 0%. 
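Note: the "Waiting for the task: (returnval){ … } to complete" and "progress is N%" records are emitted by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). Heavily simplified, the loop amounts to the sketch below; get_task_info() is a hypothetical stand-in for the PropertyCollector read of the vCenter TaskInfo object, and the real code drives this from a looping call rather than a bare while/sleep:

import time

def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()                 # TaskInfo fetched from vCenter
        if info.state == 'success':
            return info                        # logged as "completed successfully"
        if info.state == 'error':
            raise RuntimeError(info.error)     # surfaced as a task failure
        # 'queued' / 'running': report progress and poll again
        print("progress is %s%%" % getattr(info, 'progress', 0))
        time.sleep(interval)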
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.424237] env[62820]: DEBUG nova.scheduler.client.report [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1486.570694] env[62820]: DEBUG nova.network.neutron [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Updating instance_info_cache with network_info: [{"id": "cc5ea8f1-ee22-4097-b07f-9a5183bdb994", "address": "fa:16:3e:bc:bf:4b", "network": {"id": "dee74285-f5bf-48b9-b83a-04a27d2cc747", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-193696357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "552a99ce13514c8fbe1858ce4d6cd1f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc5ea8f1-ee", "ovs_interfaceid": "cc5ea8f1-ee22-4097-b07f-9a5183bdb994", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.668725] env[62820]: DEBUG nova.network.neutron [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Updated VIF entry in instance network info cache for port 917cc76e-f48e-4b85-8eb5-554dffb05814. 
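Note: the inventory payload above translates into schedulable capacity via the standard Placement formula usable = (total - reserved) * allocation_ratio. A quick check with this provider's numbers:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0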
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1486.669137] env[62820]: DEBUG nova.network.neutron [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Updating instance_info_cache with network_info: [{"id": "917cc76e-f48e-4b85-8eb5-554dffb05814", "address": "fa:16:3e:1a:26:cf", "network": {"id": "1fd446c1-45db-4e9e-875a-583467aebcd0", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1414213738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29c5fc13025a4c8c8c6a0b2624238de6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap917cc76e-f4", "ovs_interfaceid": "917cc76e-f48e-4b85-8eb5-554dffb05814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.789025] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695564, 'name': Rename_Task, 'duration_secs': 0.362093} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.789025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1486.790259] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d017947-4e81-4586-a148-dcba23568821 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.798780] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1486.798780] env[62820]: value = "task-1695565" [ 1486.798780] env[62820]: _type = "Task" [ 1486.798780] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.809234] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695565, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.855009] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bb6dc8-b7e9-1d16-b360-106192fe4315, 'name': SearchDatastore_Task, 'duration_secs': 0.014914} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.855113] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.855355] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae/3fc55bd7-48b9-4e02-af19-f186f5d0c9ae.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1486.855619] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42f6d959-2802-409a-b1fe-171fb12dd8cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.865088] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1486.865088] env[62820]: value = "task-1695566" [ 1486.865088] env[62820]: _type = "Task" [ 1486.865088] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.875570] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.932503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.932719] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1486.936663] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.504s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.939613] env[62820]: INFO nova.compute.claims [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1487.074027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Releasing lock "refresh_cache-56c371a9-983f-4d5f-8abf-0183736c374c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.074027] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Instance network_info: |[{"id": "cc5ea8f1-ee22-4097-b07f-9a5183bdb994", "address": "fa:16:3e:bc:bf:4b", "network": {"id": "dee74285-f5bf-48b9-b83a-04a27d2cc747", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-193696357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "552a99ce13514c8fbe1858ce4d6cd1f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc5ea8f1-ee", "ovs_interfaceid": "cc5ea8f1-ee22-4097-b07f-9a5183bdb994", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1487.075193] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1487.075678] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1487.075878] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1487.076454] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1487.076454] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1487.076454] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1487.076667] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1487.077468] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1487.077468] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1487.077468] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 
tempest-ServersTestBootFromVolume-1917126359-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1487.077468] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1487.077717] env[62820]: DEBUG nova.virt.hardware [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1487.077989] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:bf:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc5ea8f1-ee22-4097-b07f-9a5183bdb994', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1487.086266] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Creating folder: Project (552a99ce13514c8fbe1858ce4d6cd1f7). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.087384] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931a0aad-a012-4650-9037-bf95802c838b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.090423] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40a97770-7a0b-4362-a44a-7a082651304a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.099214] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed86271b-b4d3-4b5f-9c91-e13221d8d9a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.104800] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Created folder: Project (552a99ce13514c8fbe1858ce4d6cd1f7) in parent group-v353379. [ 1487.105017] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Creating folder: Instances. Parent ref: group-v353526. 
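Note: the nova.virt.hardware records above step from "Build topologies for 1 vcpu(s) 1:1:1" to a single VirtCPUTopology(cores=1,sockets=1,threads=1). The counting argument is small enough to show directly; this is only an illustration of the enumeration, not Nova's actual _get_possible_cpu_topologies:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the vCPU
    # count and which fit under the (here effectively unlimited) maxima.
    topos = []
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            topos.append((s, c, t))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"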
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.105732] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97925e72-726b-4d1c-9c35-f7c664dd84fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.118672] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Created folder: Instances in parent group-v353526. [ 1487.118939] env[62820]: DEBUG oslo.service.loopingcall [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.119159] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1487.119377] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7254edb-997d-499f-bf90-bc25fe794320 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.141683] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1487.141683] env[62820]: value = "task-1695569" [ 1487.141683] env[62820]: _type = "Task" [ 1487.141683] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.153227] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695569, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.176377] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Releasing lock "refresh_cache-3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.177082] env[62820]: DEBUG nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Received event network-vif-plugged-cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1487.177424] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Acquiring lock "56c371a9-983f-4d5f-8abf-0183736c374c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.177669] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Lock "56c371a9-983f-4d5f-8abf-0183736c374c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.178062] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Lock "56c371a9-983f-4d5f-8abf-0183736c374c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.178275] env[62820]: DEBUG nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] No waiting events found dispatching network-vif-plugged-cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1487.178830] env[62820]: WARNING nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Received unexpected event network-vif-plugged-cc5ea8f1-ee22-4097-b07f-9a5183bdb994 for instance with vm_state building and task_state spawning. [ 1487.179046] env[62820]: DEBUG nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Received event network-changed-cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1487.179222] env[62820]: DEBUG nova.compute.manager [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Refreshing instance network info cache due to event network-changed-cc5ea8f1-ee22-4097-b07f-9a5183bdb994. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1487.179427] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Acquiring lock "refresh_cache-56c371a9-983f-4d5f-8abf-0183736c374c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.179566] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Acquired lock "refresh_cache-56c371a9-983f-4d5f-8abf-0183736c374c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.179732] env[62820]: DEBUG nova.network.neutron [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Refreshing network info cache for port cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.310647] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695565, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.379440] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695566, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.444539] env[62820]: DEBUG nova.compute.utils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1487.454190] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1487.454190] env[62820]: DEBUG nova.network.neutron [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1487.515626] env[62820]: DEBUG nova.policy [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4377a12b9043496f940848ab1061a02e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '605b35d4577f4eb1b453554ac4dd3189', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1487.652631] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695569, 'name': CreateVM_Task, 'duration_secs': 0.44537} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.652805] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1487.653509] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.653682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.654016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1487.654277] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93bf23e4-d60f-4905-8bda-c0e7f6990191 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.659851] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1487.659851] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52469b40-46db-b50c-de1f-b0c5aeca5875" [ 
1487.659851] env[62820]: _type = "Task" [ 1487.659851] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.668584] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52469b40-46db-b50c-de1f-b0c5aeca5875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.810099] env[62820]: DEBUG oslo_vmware.api [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695565, 'name': PowerOnVM_Task, 'duration_secs': 0.617417} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.810345] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1487.810533] env[62820]: INFO nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Took 10.26 seconds to spawn the instance on the hypervisor. [ 1487.810796] env[62820]: DEBUG nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1487.811755] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa716a2-3673-4084-9468-25945e775895 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.878401] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653345} completed successfully. 
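Note: a little above, the nova.policy record reports that network:attach_external_network failed for a member/reader token. A hedged sketch of the kind of oslo.policy check involved; the rule name and credential fields are taken from the log, while the enforcer setup and the admin-only default are illustrative assumptions rather than Nova's registered policy:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'], 'is_admin': False,
         'project_id': '605b35d4577f4eb1b453554ac4dd3189'}
print(enforcer.enforce('network:attach_external_network', {}, creds))  # False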
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.878683] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae/3fc55bd7-48b9-4e02-af19-f186f5d0c9ae.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1487.878891] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1487.879442] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67eb482b-155c-43a5-8a98-b504b1ef4b4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.892380] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1487.892380] env[62820]: value = "task-1695570" [ 1487.892380] env[62820]: _type = "Task" [ 1487.892380] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.903910] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.953241] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1488.174672] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52469b40-46db-b50c-de1f-b0c5aeca5875, 'name': SearchDatastore_Task, 'duration_secs': 0.010098} completed successfully. 
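Note: the "Extending root virtual disk to 1048576" figure above matches the m1.nano flavor's root_gb=1 expressed in KiB, assuming (as a reading of ExtendVirtualDisk_Task's capacity unit, not something stated in this log) that the requested size is given in KiB:

KIB_PER_GIB = 1024 * 1024
root_gb = 1                         # m1.nano flavor shown earlier in this log
assert root_gb * KIB_PER_GIB == 1048576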
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.174672] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.174858] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.175232] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.175232] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.175606] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1488.175713] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6636cdcf-0c9b-4d9f-901d-402f7ce63bef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.199780] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1488.199988] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1488.204829] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d316296b-3768-411a-8678-afe7b9d304cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.215402] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1488.215402] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526b2c99-437f-063e-c151-905bc557ef54" [ 1488.215402] env[62820]: _type = "Task" [ 1488.215402] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.226091] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526b2c99-437f-063e-c151-905bc557ef54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.335932] env[62820]: INFO nova.compute.manager [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Took 48.22 seconds to build instance. [ 1488.341301] env[62820]: DEBUG nova.network.neutron [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Successfully created port: b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1488.406610] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096908} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.406909] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1488.407747] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e9b7bd-a312-4a83-ae1d-5f6515d0f598 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.432310] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae/3fc55bd7-48b9-4e02-af19-f186f5d0c9ae.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.435634] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b0fa774-e1d1-4d51-ac71-9a16761e5591 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.453194] env[62820]: DEBUG nova.network.neutron [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Updated VIF entry in instance network info cache for port cc5ea8f1-ee22-4097-b07f-9a5183bdb994. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1488.453556] env[62820]: DEBUG nova.network.neutron [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Updating instance_info_cache with network_info: [{"id": "cc5ea8f1-ee22-4097-b07f-9a5183bdb994", "address": "fa:16:3e:bc:bf:4b", "network": {"id": "dee74285-f5bf-48b9-b83a-04a27d2cc747", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-193696357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "552a99ce13514c8fbe1858ce4d6cd1f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc5ea8f1-ee", "ovs_interfaceid": "cc5ea8f1-ee22-4097-b07f-9a5183bdb994", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.466876] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1488.466876] env[62820]: value = "task-1695571" [ 1488.466876] env[62820]: _type = "Task" [ 1488.466876] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.481785] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695571, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.498051] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c79aa1-0b12-4d14-93c9-61a15796ea2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.512600] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d11556-7eac-4758-94e9-b7526db998ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.549459] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bcb319-8fb5-4703-ac80-ce70dea5ae1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.558330] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088b2311-514a-4e1c-aa02-63192eda1193 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.574151] env[62820]: DEBUG nova.compute.provider_tree [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.728461] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526b2c99-437f-063e-c151-905bc557ef54, 'name': SearchDatastore_Task, 'duration_secs': 0.023541} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.729357] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5383c979-51e0-42a0-9c22-59071bc5ae04 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.739120] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1488.739120] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529a3e4d-c5dd-d562-529e-ffa4901cdb17" [ 1488.739120] env[62820]: _type = "Task" [ 1488.739120] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.748706] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529a3e4d-c5dd-d562-529e-ffa4901cdb17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.838655] env[62820]: DEBUG oslo_concurrency.lockutils [None req-464e8dee-68b5-4a8f-bad2-a44ceaeab93a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.313s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.852095] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "492db939-78f4-4642-89dd-a01fa94f41b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.852233] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "492db939-78f4-4642-89dd-a01fa94f41b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.958730] env[62820]: DEBUG nova.network.neutron [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Successfully updated port: cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1488.962376] env[62820]: DEBUG oslo_concurrency.lockutils [req-b321d7e5-824f-409f-b38e-7dfa65ce5738 req-cd0a2895-a620-4524-9982-a116ad24f3d9 service nova] Releasing lock "refresh_cache-56c371a9-983f-4d5f-8abf-0183736c374c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.969621] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1488.987539] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695571, 'name': ReconfigVM_Task, 'duration_secs': 0.302845} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.987897] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae/3fc55bd7-48b9-4e02-af19-f186f5d0c9ae.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1488.989153] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2da375d-9697-448b-8591-a03ecc39a2bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.000634] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1489.000634] env[62820]: value = "task-1695572" [ 1489.000634] env[62820]: _type = "Task" [ 1489.000634] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.007526] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1489.007867] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1489.007949] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1489.008142] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1489.008288] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 
tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1489.008430] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1489.008994] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1489.009152] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1489.009339] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1489.009503] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1489.009674] env[62820]: DEBUG nova.virt.hardware [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1489.010502] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d133b94-9c9e-4d3e-a5c4-a42fba33dff6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.019798] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695572, 'name': Rename_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.023881] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88baabf1-e8f6-45ce-a5f4-c9d69e32adf3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.077419] env[62820]: DEBUG nova.scheduler.client.report [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1489.161128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.161128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.193224] env[62820]: DEBUG nova.compute.manager [req-ebf18aa6-c939-42f8-958b-729d4ac09e18 req-f615caf8-d3c1-4365-aed9-e635a3dfeecd service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Received event network-vif-plugged-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1489.193224] env[62820]: DEBUG oslo_concurrency.lockutils [req-ebf18aa6-c939-42f8-958b-729d4ac09e18 req-f615caf8-d3c1-4365-aed9-e635a3dfeecd service nova] Acquiring lock "b7c9f518-c908-42cc-ba09-59b0f8431f68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.193224] env[62820]: DEBUG oslo_concurrency.lockutils [req-ebf18aa6-c939-42f8-958b-729d4ac09e18 req-f615caf8-d3c1-4365-aed9-e635a3dfeecd service nova] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.193224] env[62820]: DEBUG oslo_concurrency.lockutils [req-ebf18aa6-c939-42f8-958b-729d4ac09e18 req-f615caf8-d3c1-4365-aed9-e635a3dfeecd service nova] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.193224] env[62820]: DEBUG nova.compute.manager [req-ebf18aa6-c939-42f8-958b-729d4ac09e18 req-f615caf8-d3c1-4365-aed9-e635a3dfeecd service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] No waiting events found dispatching network-vif-plugged-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1489.193466] env[62820]: WARNING nova.compute.manager [req-ebf18aa6-c939-42f8-958b-729d4ac09e18 req-f615caf8-d3c1-4365-aed9-e635a3dfeecd service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Received unexpected event network-vif-plugged-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 for instance with vm_state building and task_state spawning. [ 1489.256883] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529a3e4d-c5dd-d562-529e-ffa4901cdb17, 'name': SearchDatastore_Task, 'duration_secs': 0.011277} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.257310] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.257669] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 56c371a9-983f-4d5f-8abf-0183736c374c/56c371a9-983f-4d5f-8abf-0183736c374c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1489.257980] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-999adb5f-460e-4c49-8a2b-387e6e69d2cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.269127] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1489.269127] env[62820]: value = "task-1695573" [ 1489.269127] env[62820]: _type = "Task" [ 1489.269127] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.280829] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695573, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.361067] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1489.469203] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquiring lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.469203] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquired lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.469203] env[62820]: DEBUG nova.network.neutron [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1489.518041] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695572, 'name': Rename_Task, 'duration_secs': 0.168409} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.518041] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1489.518041] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24436042-1ba0-47e8-9c8f-60f4afdfd6db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.527930] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1489.527930] env[62820]: value = "task-1695574" [ 1489.527930] env[62820]: _type = "Task" [ 1489.527930] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.540266] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695574, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.589289] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.647s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.590091] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1489.593133] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.961s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.593415] env[62820]: DEBUG nova.objects.instance [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1489.782228] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695573, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.892311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.042459] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695574, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.103342] env[62820]: DEBUG nova.compute.utils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1490.112022] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1490.116020] env[62820]: DEBUG nova.network.neutron [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1490.277822] env[62820]: DEBUG nova.network.neutron [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.285946] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565148} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.286246] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 56c371a9-983f-4d5f-8abf-0183736c374c/56c371a9-983f-4d5f-8abf-0183736c374c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1490.286493] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1490.286764] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52e4eaa3-7378-45b8-a150-3dd0519ef700 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.294755] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1490.294755] env[62820]: value = "task-1695575" [ 1490.294755] env[62820]: _type = "Task" [ 1490.294755] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.305772] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695575, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.365321] env[62820]: DEBUG nova.policy [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3af9a43692cb48e28948f126743239f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27b5475e407246dc8e4ed098243af023', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1490.541399] env[62820]: DEBUG oslo_vmware.api [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695574, 'name': PowerOnVM_Task, 'duration_secs': 0.589343} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.542198] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1490.542568] env[62820]: INFO nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Took 9.43 seconds to spawn the instance on the hypervisor. [ 1490.543016] env[62820]: DEBUG nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1490.544041] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca438fe-6239-4496-b147-d017e0e3a733 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.612446] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1490.619973] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9d65546c-36fa-45ac-9c27-df88f6d4b692 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.621967] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.425s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.622194] env[62820]: DEBUG nova.objects.instance [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lazy-loading 'resources' on Instance uuid 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1490.792241] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.792494] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.793605] env[62820]: DEBUG nova.network.neutron [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updating instance_info_cache with network_info: [{"id": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "address": "fa:16:3e:35:c5:b4", "network": {"id": "25947843-47e0-4ac5-86e5-8b6f798593e0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1022327073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee1a42e497e94154ac770cb4c9d0456a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd6ae25c-d8", 
"ovs_interfaceid": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.808301] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695575, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173063} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.809178] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1490.810339] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938cbdf4-c7c3-4edc-8d41-647477e6c4a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.836441] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 56c371a9-983f-4d5f-8abf-0183736c374c/56c371a9-983f-4d5f-8abf-0183736c374c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1490.837430] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd7d085-cc66-4dde-b06d-37938727d32f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.864029] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1490.864029] env[62820]: value = "task-1695576" [ 1490.864029] env[62820]: _type = "Task" [ 1490.864029] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.873189] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695576, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.067450] env[62820]: INFO nova.compute.manager [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Took 43.66 seconds to build instance. 
[ 1491.118632] env[62820]: DEBUG oslo_concurrency.lockutils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.119281] env[62820]: DEBUG oslo_concurrency.lockutils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.235617] env[62820]: DEBUG nova.network.neutron [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Successfully created port: 0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1491.295668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Releasing lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.295977] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Instance network_info: |[{"id": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "address": "fa:16:3e:35:c5:b4", "network": {"id": "25947843-47e0-4ac5-86e5-8b6f798593e0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1022327073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee1a42e497e94154ac770cb4c9d0456a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd6ae25c-d8", "ovs_interfaceid": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1491.300609] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:35:c5:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1491.312158] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Creating folder: Project (ee1a42e497e94154ac770cb4c9d0456a). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.313242] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db7630e7-3684-45a5-8d40-b4f365114598 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.325812] env[62820]: DEBUG nova.network.neutron [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Successfully updated port: b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.331429] env[62820]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1491.331609] env[62820]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62820) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1491.334719] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Folder already exists: Project (ee1a42e497e94154ac770cb4c9d0456a). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1491.335060] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Creating folder: Instances. Parent ref: group-v353501. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.335882] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20f4f469-f860-482a-8827-4a4ec1f4641d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.352750] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Created folder: Instances in parent group-v353501. [ 1491.355787] env[62820]: DEBUG oslo.service.loopingcall [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1491.355787] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1491.355787] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbc239fb-e05d-48fe-8640-02d8da952747 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.386656] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695576, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.391329] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1491.391329] env[62820]: value = "task-1695579" [ 1491.391329] env[62820]: _type = "Task" [ 1491.391329] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.403816] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695579, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.571045] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b39eb2b8-94ff-49a5-8ae1-d23db3b1ccb4 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.176s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.603175] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1cbdad-34ab-4645-9b64-82e7cf66450e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.613054] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64e203e-97f4-4d46-a1c4-6330d0de9b35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.646899] env[62820]: DEBUG nova.compute.utils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1491.649123] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1491.653111] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9c0246-176f-43a6-a632-bb447bf0dc7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.661720] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36cfa17-af14-4b49-8449-3f3d2aa0b542 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.682096] env[62820]: DEBUG nova.compute.provider_tree [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.693662] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1491.693940] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1491.694404] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.694632] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1491.694993] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.695200] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 
tempest-InstanceActionsV221TestJSON-535344610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1491.695436] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1491.695644] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1491.695856] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1491.696114] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1491.696390] env[62820]: DEBUG nova.virt.hardware [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1491.697682] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd165004-9fed-49d1-8214-eb61b1fd5c10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.706990] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "871195a8-8b7d-433f-a0b5-c570c65faf1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.707397] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.713017] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bb92d6-6302-4a12-941c-2cecebd9a2af {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.839503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "refresh_cache-c06e3dcd-b997-497c-865d-5f277695cd7a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.839503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquired lock "refresh_cache-c06e3dcd-b997-497c-865d-5f277695cd7a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.839503] env[62820]: DEBUG nova.network.neutron [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1491.885609] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695576, 'name': ReconfigVM_Task, 'duration_secs': 0.887153} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.885822] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 56c371a9-983f-4d5f-8abf-0183736c374c/56c371a9-983f-4d5f-8abf-0183736c374c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1491.886546] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a43b602-0d6f-4133-a82c-4f3b2a068a8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.897267] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1491.897267] env[62820]: value = "task-1695580" [ 1491.897267] env[62820]: _type = "Task" [ 1491.897267] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.904155] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695579, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.909836] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695580, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.074074] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1492.116143] env[62820]: DEBUG nova.compute.manager [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Received event network-changed-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1492.116365] env[62820]: DEBUG nova.compute.manager [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Refreshing instance network info cache due to event network-changed-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1492.116580] env[62820]: DEBUG oslo_concurrency.lockutils [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] Acquiring lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.116721] env[62820]: DEBUG oslo_concurrency.lockutils [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] Acquired lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.116884] env[62820]: DEBUG nova.network.neutron [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Refreshing network info cache for port cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.153164] env[62820]: DEBUG oslo_concurrency.lockutils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.034s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.188460] env[62820]: DEBUG nova.scheduler.client.report [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1492.406237] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695579, 'name': 
CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.407735] env[62820]: DEBUG nova.network.neutron [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1492.414103] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695580, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.599276] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.659131] env[62820]: DEBUG nova.network.neutron [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Updating instance_info_cache with network_info: [{"id": "b1412dd4-3e38-4763-a38d-3ebff9f8f873", "address": "fa:16:3e:a6:7d:9c", "network": {"id": "9f77bd89-45ce-4517-8e62-db590a30ae04", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1214478572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605b35d4577f4eb1b453554ac4dd3189", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1412dd4-3e", "ovs_interfaceid": "b1412dd4-3e38-4763-a38d-3ebff9f8f873", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.695266] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.700095] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.484s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.704474] env[62820]: INFO nova.compute.claims [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1492.735032] env[62820]: INFO nova.scheduler.client.report [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Deleted allocations for instance 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65 [ 1492.911671] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695580, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.916716] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695579, 'name': CreateVM_Task, 'duration_secs': 1.042004} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.916716] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1492.917597] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353507', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'name': 'volume-952365ce-b448-4f14-9fc5-f9b7c5180d39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c9f518-c908-42cc-ba09-59b0f8431f68', 'attached_at': '', 'detached_at': '', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'serial': '952365ce-b448-4f14-9fc5-f9b7c5180d39'}, 'attachment_id': 'e6ac3597-66d3-484f-9384-4a677241353b', 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62820) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1492.917962] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Root volume attach. 
Driver type: vmdk {{(pid=62820) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1492.919708] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b29e42-1c92-44ea-b5d5-3e973c9236f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.930415] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff891b9a-ca8d-448e-b326-1b1f8b29ace4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.940560] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b38ba25-ebdd-48c8-9ee6-971dc21224a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.952032] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-25d91f8d-8770-425b-861c-759b4e7c310c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.962240] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1492.962240] env[62820]: value = "task-1695581" [ 1492.962240] env[62820]: _type = "Task" [ 1492.962240] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.973913] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695581, 'name': RelocateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.974444] env[62820]: DEBUG nova.network.neutron [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updated VIF entry in instance network info cache for port cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1492.974764] env[62820]: DEBUG nova.network.neutron [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updating instance_info_cache with network_info: [{"id": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "address": "fa:16:3e:35:c5:b4", "network": {"id": "25947843-47e0-4ac5-86e5-8b6f798593e0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1022327073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee1a42e497e94154ac770cb4c9d0456a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd6ae25c-d8", "ovs_interfaceid": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.161665] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Releasing lock "refresh_cache-c06e3dcd-b997-497c-865d-5f277695cd7a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.162029] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Instance network_info: |[{"id": "b1412dd4-3e38-4763-a38d-3ebff9f8f873", "address": "fa:16:3e:a6:7d:9c", "network": {"id": "9f77bd89-45ce-4517-8e62-db590a30ae04", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1214478572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605b35d4577f4eb1b453554ac4dd3189", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1412dd4-3e", "ovs_interfaceid": "b1412dd4-3e38-4763-a38d-3ebff9f8f873", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1493.162518] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:7d:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1412dd4-3e38-4763-a38d-3ebff9f8f873', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.171325] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Creating folder: Project (605b35d4577f4eb1b453554ac4dd3189). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1493.171872] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e56ff95-5d13-477a-9c67-6b2420faea40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.186257] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Created folder: Project (605b35d4577f4eb1b453554ac4dd3189) in parent group-v353379. [ 1493.186257] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Creating folder: Instances. Parent ref: group-v353531. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1493.186257] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67935744-bbac-4176-bf70-34b8fe037a3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.198218] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Created folder: Instances in parent group-v353531. [ 1493.198495] env[62820]: DEBUG oslo.service.loopingcall [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.198733] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.198986] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5506c5e-c550-4d9f-9f37-66c72417b29a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.227249] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.227249] env[62820]: value = "task-1695584" [ 1493.227249] env[62820]: _type = "Task" [ 1493.227249] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.238900] env[62820]: DEBUG oslo_concurrency.lockutils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.238993] env[62820]: DEBUG oslo_concurrency.lockutils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.239246] env[62820]: INFO nova.compute.manager [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Attaching volume ff20d603-5fe3-41ec-814a-a9f0253392bb to /dev/sdb [ 1493.242664] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695584, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.247547] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a12a534f-3db0-4404-beed-a82657a386a1 tempest-AttachInterfacesUnderV243Test-1650829253 tempest-AttachInterfacesUnderV243Test-1650829253-project-member] Lock "7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.033s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.289106] env[62820]: DEBUG nova.network.neutron [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Successfully updated port: 0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1493.302883] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0cdc2a-fc02-49b6-a88e-4f86d9ed2dfa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.315694] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02071821-ca82-4293-bba1-5c8f16479d4d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.331728] env[62820]: DEBUG nova.virt.block_device [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Updating existing volume attachment record: 0e6d266b-2f17-4c62-83fe-dafd4a1e7c8b {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1493.412351] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695580, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.478887] env[62820]: DEBUG oslo_concurrency.lockutils [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] Releasing lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.478887] env[62820]: DEBUG nova.compute.manager [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Received event network-vif-plugged-b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1493.478887] env[62820]: DEBUG oslo_concurrency.lockutils [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] Acquiring lock "c06e3dcd-b997-497c-865d-5f277695cd7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.479124] env[62820]: DEBUG oslo_concurrency.lockutils [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.479312] env[62820]: DEBUG oslo_concurrency.lockutils [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.479517] env[62820]: DEBUG nova.compute.manager [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] No waiting events found dispatching network-vif-plugged-b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1493.479722] env[62820]: WARNING nova.compute.manager [req-39f509e5-55ce-4d9c-aa1f-ea399eea02be req-f03c36cd-f250-4a32-9f5e-2fb242d9407b service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Received unexpected event network-vif-plugged-b1412dd4-3e38-4763-a38d-3ebff9f8f873 for instance with vm_state building and task_state spawning. [ 1493.488516] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695581, 'name': RelocateVM_Task} progress is 20%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.741540] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695584, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.792800] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "refresh_cache-ee188979-e740-4125-a17f-1c02ef9588f1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.793061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquired lock "refresh_cache-ee188979-e740-4125-a17f-1c02ef9588f1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.793274] env[62820]: DEBUG nova.network.neutron [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1493.915822] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695580, 'name': Rename_Task, 'duration_secs': 1.947586} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.916792] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1493.916792] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6ba6527-171a-4904-99a1-e809f02a40a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.941342] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1493.941342] env[62820]: value = "task-1695588" [ 1493.941342] env[62820]: _type = "Task" [ 1493.941342] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.958531] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695588, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.976224] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695581, 'name': RelocateVM_Task, 'duration_secs': 0.526786} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.977208] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Volume attach. Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1493.977208] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353507', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'name': 'volume-952365ce-b448-4f14-9fc5-f9b7c5180d39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c9f518-c908-42cc-ba09-59b0f8431f68', 'attached_at': '', 'detached_at': '', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'serial': '952365ce-b448-4f14-9fc5-f9b7c5180d39'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1493.979552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74da84d-00ed-436c-9a44-c650d1ab0281 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.998780] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002c7717-fce9-4505-bdb8-82695fe4c889 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.030446] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-952365ce-b448-4f14-9fc5-f9b7c5180d39/volume-952365ce-b448-4f14-9fc5-f9b7c5180d39.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1494.033703] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9c4613b-f797-49d4-9452-c2304c625a9d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.055829] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1494.055829] env[62820]: value = "task-1695589" [ 1494.055829] env[62820]: _type = "Task" [ 1494.055829] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.067206] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695589, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.070391] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.070642] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.070850] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.071228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.071228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.074283] env[62820]: INFO nova.compute.manager [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Terminating instance [ 1494.164168] env[62820]: DEBUG nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Received event network-changed-b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1494.164168] env[62820]: DEBUG nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Refreshing instance network info cache due to event network-changed-b1412dd4-3e38-4763-a38d-3ebff9f8f873. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1494.164168] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Acquiring lock "refresh_cache-c06e3dcd-b997-497c-865d-5f277695cd7a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.164168] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Acquired lock "refresh_cache-c06e3dcd-b997-497c-865d-5f277695cd7a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.164168] env[62820]: DEBUG nova.network.neutron [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Refreshing network info cache for port b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.243473] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695584, 'name': CreateVM_Task, 'duration_secs': 0.599722} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.244247] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.247576] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.247878] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.247963] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.248546] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd87700-13c9-4c96-8773-0e4b31ca6e75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.255864] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1494.255864] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c3133a-8fa5-2b24-6bfc-ca88485271cf" [ 1494.255864] env[62820]: 
_type = "Task" [ 1494.255864] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.272503] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c3133a-8fa5-2b24-6bfc-ca88485271cf, 'name': SearchDatastore_Task, 'duration_secs': 0.01278} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.273036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.273419] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.273779] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.274107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.274419] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.274826] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-637e296a-b5b6-4772-8314-bf312ed63714 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.294236] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.294236] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.294541] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5337bd89-5d44-4ef2-8c33-f5a985e18c12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.306699] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1494.306699] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52924a81-3e10-c502-aabb-1b0521f18276" [ 1494.306699] env[62820]: _type = "Task" [ 1494.306699] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.315880] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52924a81-3e10-c502-aabb-1b0521f18276, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.384505] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e0a0d4-b3bd-46c8-bcee-4ef9ee2eb05d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.399757] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7094198-fc3b-4793-ab8a-dcf6b54990a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.447800] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27106c47-efa8-474f-87ae-68a83b9b7554 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.459389] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2bb8b2-bb2d-42c6-87f9-7dfa931e6de2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.463688] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695588, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.475449] env[62820]: DEBUG nova.compute.provider_tree [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.570183] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695589, 'name': ReconfigVM_Task, 'duration_secs': 0.398736} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.573889] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-952365ce-b448-4f14-9fc5-f9b7c5180d39/volume-952365ce-b448-4f14-9fc5-f9b7c5180d39.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.578586] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-addb86de-00fb-4ed4-b8de-2a21cbe79d05 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.592758] env[62820]: DEBUG nova.network.neutron [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.596955] env[62820]: DEBUG nova.compute.manager [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1494.596955] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.596955] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd18a21-bca6-4771-80cb-960c969eba6e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.608180] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.608496] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1494.608496] env[62820]: value = "task-1695590" [ 1494.608496] env[62820]: _type = "Task" [ 1494.608496] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.608756] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08b3a764-4cd0-45ee-83fb-13457c10a0ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.622110] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695590, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.624842] env[62820]: DEBUG oslo_vmware.api [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1494.624842] env[62820]: value = "task-1695591" [ 1494.624842] env[62820]: _type = "Task" [ 1494.624842] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.633092] env[62820]: DEBUG oslo_vmware.api [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695591, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.824977] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52924a81-3e10-c502-aabb-1b0521f18276, 'name': SearchDatastore_Task, 'duration_secs': 0.011026} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.826513] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8cc84b5-1172-41d6-9f21-202b50c19743 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.835504] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1494.835504] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523ecfa4-8680-77f5-5a5d-b98bbc7304ac" [ 1494.835504] env[62820]: _type = "Task" [ 1494.835504] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.844966] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523ecfa4-8680-77f5-5a5d-b98bbc7304ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.961022] env[62820]: DEBUG oslo_vmware.api [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695588, 'name': PowerOnVM_Task, 'duration_secs': 0.672885} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.961022] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1494.961022] env[62820]: INFO nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Took 12.26 seconds to spawn the instance on the hypervisor. 
[ 1494.961022] env[62820]: DEBUG nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1494.961022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929fa0f9-80aa-4df6-b96d-d90d9a2004ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.981413] env[62820]: DEBUG nova.scheduler.client.report [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1495.024023] env[62820]: DEBUG nova.network.neutron [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Updating instance_info_cache with network_info: [{"id": "0560ed3a-b2f5-4e20-bedc-db38149bd216", "address": "fa:16:3e:dc:d4:33", "network": {"id": "1256f644-afdd-4ed0-a127-fe32e5cdd536", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-967603757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27b5475e407246dc8e4ed098243af023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0560ed3a-b2", "ovs_interfaceid": "0560ed3a-b2f5-4e20-bedc-db38149bd216", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.124558] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695590, 'name': ReconfigVM_Task, 'duration_secs': 0.185085} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.128524] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353507', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'name': 'volume-952365ce-b448-4f14-9fc5-f9b7c5180d39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c9f518-c908-42cc-ba09-59b0f8431f68', 'attached_at': '', 'detached_at': '', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'serial': '952365ce-b448-4f14-9fc5-f9b7c5180d39'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1495.129098] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17232c68-0909-4918-87b8-3acd6def997f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.137145] env[62820]: DEBUG oslo_vmware.api [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695591, 'name': PowerOffVM_Task, 'duration_secs': 0.272256} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.138523] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1495.138693] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1495.139018] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1495.139018] env[62820]: value = "task-1695592" [ 1495.139018] env[62820]: _type = "Task" [ 1495.139018] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.139370] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-561729e4-93f3-4d2d-8fd6-9f25e49a3f20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.151457] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695592, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.161775] env[62820]: DEBUG nova.network.neutron [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Updated VIF entry in instance network info cache for port b1412dd4-3e38-4763-a38d-3ebff9f8f873. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1495.162336] env[62820]: DEBUG nova.network.neutron [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Updating instance_info_cache with network_info: [{"id": "b1412dd4-3e38-4763-a38d-3ebff9f8f873", "address": "fa:16:3e:a6:7d:9c", "network": {"id": "9f77bd89-45ce-4517-8e62-db590a30ae04", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1214478572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605b35d4577f4eb1b453554ac4dd3189", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1412dd4-3e", "ovs_interfaceid": "b1412dd4-3e38-4763-a38d-3ebff9f8f873", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.348865] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523ecfa4-8680-77f5-5a5d-b98bbc7304ac, 'name': SearchDatastore_Task, 'duration_secs': 0.011494} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.349143] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.349233] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c06e3dcd-b997-497c-865d-5f277695cd7a/c06e3dcd-b997-497c-865d-5f277695cd7a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.349568] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa65340a-9292-411f-9fdd-522a6516b32a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.359753] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1495.359753] env[62820]: value = "task-1695594" [ 1495.359753] env[62820]: _type = "Task" [ 1495.359753] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.370661] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.480299] env[62820]: INFO nova.compute.manager [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Took 48.05 seconds to build instance. [ 1495.490929] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.788s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.490929] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1495.492012] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.800s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.492385] env[62820]: DEBUG nova.objects.instance [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lazy-loading 'resources' on Instance uuid f186854d-3f0a-4512-83b9-2c946247ccbe {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.528081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Releasing lock "refresh_cache-ee188979-e740-4125-a17f-1c02ef9588f1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.528081] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Instance network_info: |[{"id": "0560ed3a-b2f5-4e20-bedc-db38149bd216", "address": "fa:16:3e:dc:d4:33", "network": {"id": "1256f644-afdd-4ed0-a127-fe32e5cdd536", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-967603757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27b5475e407246dc8e4ed098243af023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0560ed3a-b2", "ovs_interfaceid": "0560ed3a-b2f5-4e20-bedc-db38149bd216", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1495.528081] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:d4:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0560ed3a-b2f5-4e20-bedc-db38149bd216', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.534883] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 
tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Creating folder: Project (27b5475e407246dc8e4ed098243af023). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1495.536337] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4beaba12-863a-4e36-a731-0c6365c99c4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.553424] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Created folder: Project (27b5475e407246dc8e4ed098243af023) in parent group-v353379. [ 1495.553424] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Creating folder: Instances. Parent ref: group-v353536. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1495.553424] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d19d4d34-80d4-4c89-9726-2545f531be07 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.568560] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Created folder: Instances in parent group-v353536. [ 1495.568966] env[62820]: DEBUG oslo.service.loopingcall [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.569337] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1495.569679] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7bd6eff1-7ef2-4ec2-9d45-7d30e9e8a39a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.600517] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.600517] env[62820]: value = "task-1695597" [ 1495.600517] env[62820]: _type = "Task" [ 1495.600517] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.609730] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695597, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.661223] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695592, 'name': Rename_Task, 'duration_secs': 0.239808} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.661589] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1495.661898] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c79181d5-9f26-4a40-91af-0f483379b068 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.667860] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Releasing lock "refresh_cache-c06e3dcd-b997-497c-865d-5f277695cd7a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.667860] env[62820]: DEBUG nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Received event network-vif-plugged-0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1495.667860] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Acquiring lock "ee188979-e740-4125-a17f-1c02ef9588f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.668602] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Lock "ee188979-e740-4125-a17f-1c02ef9588f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.668602] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Lock "ee188979-e740-4125-a17f-1c02ef9588f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.668602] env[62820]: DEBUG nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] No waiting events found dispatching network-vif-plugged-0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1495.668835] env[62820]: WARNING nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Received unexpected event network-vif-plugged-0560ed3a-b2f5-4e20-bedc-db38149bd216 for instance with vm_state building and task_state spawning. 
[ 1495.669037] env[62820]: DEBUG nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Received event network-changed-0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1495.669241] env[62820]: DEBUG nova.compute.manager [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Refreshing instance network info cache due to event network-changed-0560ed3a-b2f5-4e20-bedc-db38149bd216. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1495.669465] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Acquiring lock "refresh_cache-ee188979-e740-4125-a17f-1c02ef9588f1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.669610] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Acquired lock "refresh_cache-ee188979-e740-4125-a17f-1c02ef9588f1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.669788] env[62820]: DEBUG nova.network.neutron [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Refreshing network info cache for port 0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.679542] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1495.679542] env[62820]: value = "task-1695598" [ 1495.679542] env[62820]: _type = "Task" [ 1495.679542] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.701267] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695598, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.776752] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.777357] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.777357] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Deleting the datastore file [datastore1] 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.777729] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-037e1c77-8d10-4b30-8a4b-0463be7da0ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.791109] env[62820]: DEBUG oslo_vmware.api [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for the task: (returnval){ [ 1495.791109] env[62820]: value = "task-1695600" [ 1495.791109] env[62820]: _type = "Task" [ 1495.791109] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.804149] env[62820]: DEBUG oslo_vmware.api [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.874641] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695594, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.983157] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c9d2211-024c-43c6-8439-630eb8e81a9f tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "56c371a9-983f-4d5f-8abf-0183736c374c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.572s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.997735] env[62820]: DEBUG nova.compute.utils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1496.010368] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1496.010368] env[62820]: DEBUG nova.network.neutron [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1496.116485] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695597, 'name': CreateVM_Task, 'duration_secs': 0.466289} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.116720] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1496.119721] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.119721] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.119721] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1496.120886] env[62820]: DEBUG nova.policy [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Policy check for network:attach_external_network failed with credentials 
{'is_admin': False, 'user_id': '3b328ffc83d344899fcbbb6e9ade1698', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bdc42fe98fb43d7bd92e2dd789aff93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1496.123125] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd0935e4-68bd-484d-b56d-e1e35427e52f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.130035] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1496.130035] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521625e4-515d-a67d-95fc-6f16d36f6c77" [ 1496.130035] env[62820]: _type = "Task" [ 1496.130035] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.144988] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521625e4-515d-a67d-95fc-6f16d36f6c77, 'name': SearchDatastore_Task, 'duration_secs': 0.010216} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.148760] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.149207] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.150246] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.150246] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.150246] env[62820]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.150418] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3574a36-78b0-46be-b08e-58c5ea7035e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.166256] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.166256] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1496.166905] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef13edc-9bf9-419b-8d8c-0d1ef655ee94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.173365] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1496.173365] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a40d3f-f635-1a27-6bc3-20dfba1c2579" [ 1496.173365] env[62820]: _type = "Task" [ 1496.173365] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.192666] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a40d3f-f635-1a27-6bc3-20dfba1c2579, 'name': SearchDatastore_Task} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.193980] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c40884f-0d38-46fd-9954-da7d2c1cbb6d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.200154] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695598, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.207067] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1496.207067] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525c00e3-f7d9-65b1-1c33-ac52508f686a" [ 1496.207067] env[62820]: _type = "Task" [ 1496.207067] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.217354] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525c00e3-f7d9-65b1-1c33-ac52508f686a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.308102] env[62820]: DEBUG oslo_vmware.api [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Task: {'id': task-1695600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33393} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.308102] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1496.308261] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1496.308620] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1496.308620] env[62820]: INFO nova.compute.manager [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1496.308764] env[62820]: DEBUG oslo.service.loopingcall [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.308941] env[62820]: DEBUG nova.compute.manager [-] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1496.309156] env[62820]: DEBUG nova.network.neutron [-] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1496.375742] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6287} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.375874] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c06e3dcd-b997-497c-865d-5f277695cd7a/c06e3dcd-b997-497c-865d-5f277695cd7a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1496.376098] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1496.376357] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c2b0be0-c909-4914-8c33-9f09155ff872 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.388287] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1496.388287] env[62820]: value = "task-1695601" [ 1496.388287] env[62820]: _type = "Task" [ 1496.388287] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.400286] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.486863] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1496.503465] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "7a755ef6-67bc-4242-9343-c54c8566adf8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.503465] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.503609] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1496.514102] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11cb09f-c23b-485b-91c2-c9b755140c19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.521292] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e90e548-5c67-4024-819f-3502b6ef2492 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.557748] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d627d58-4745-4b92-9ad0-9baa893fed4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.566865] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de9a300-d15b-4ad0-bab9-c481c5174354 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.584592] env[62820]: DEBUG nova.compute.provider_tree [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.692541] env[62820]: DEBUG oslo_vmware.api [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1695598, 'name': PowerOnVM_Task, 'duration_secs': 0.589837} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.692800] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.692985] env[62820]: INFO nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Took 9.62 seconds to spawn the instance on the hypervisor. [ 1496.693406] env[62820]: DEBUG nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1496.694226] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6105ec5f-d0a6-4a2f-a51d-e6e47467c788 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.720174] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525c00e3-f7d9-65b1-1c33-ac52508f686a, 'name': SearchDatastore_Task, 'duration_secs': 0.013303} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.720174] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.720509] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ee188979-e740-4125-a17f-1c02ef9588f1/ee188979-e740-4125-a17f-1c02ef9588f1.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1496.720662] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87dfa411-86e8-4fa2-9a54-68d62f737458 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.731998] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1496.731998] env[62820]: value = "task-1695602" [ 1496.731998] env[62820]: _type = "Task" [ 1496.731998] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.744107] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.901360] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097093} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.901692] env[62820]: DEBUG nova.network.neutron [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Updated VIF entry in instance network info cache for port 0560ed3a-b2f5-4e20-bedc-db38149bd216. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1496.901905] env[62820]: DEBUG nova.network.neutron [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Updating instance_info_cache with network_info: [{"id": "0560ed3a-b2f5-4e20-bedc-db38149bd216", "address": "fa:16:3e:dc:d4:33", "network": {"id": "1256f644-afdd-4ed0-a127-fe32e5cdd536", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-967603757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27b5475e407246dc8e4ed098243af023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0560ed3a-b2", "ovs_interfaceid": "0560ed3a-b2f5-4e20-bedc-db38149bd216", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.904825] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1496.904952] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5dba999-afbe-4700-943e-97feb90bd691 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.935530] env[62820]: DEBUG nova.virt.vmwareapi.volumeops 
[None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] c06e3dcd-b997-497c-865d-5f277695cd7a/c06e3dcd-b997-497c-865d-5f277695cd7a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1496.936577] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45336bab-d3d0-422a-ae54-0aec0afac9ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.965356] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1496.965356] env[62820]: value = "task-1695603" [ 1496.965356] env[62820]: _type = "Task" [ 1496.965356] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.980436] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695603, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.024311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.034266] env[62820]: DEBUG nova.network.neutron [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Successfully created port: fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1497.089143] env[62820]: DEBUG nova.scheduler.client.report [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1497.189762] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.189762] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.217108] env[62820]: INFO nova.compute.manager [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Took 48.79 seconds to build instance. [ 1497.251126] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695602, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.406326] env[62820]: DEBUG oslo_concurrency.lockutils [req-a78ed4ee-e3f6-449c-b177-09b347caea82 req-d7b622fc-3084-48ba-9481-35a9d71f27e5 service nova] Releasing lock "refresh_cache-ee188979-e740-4125-a17f-1c02ef9588f1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.434705] env[62820]: DEBUG nova.compute.manager [req-4f333f9e-bdc3-4704-b160-643b7c277acc req-c80844d9-70f2-46d8-a373-10b53c2daf59 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Received event network-vif-deleted-917cc76e-f48e-4b85-8eb5-554dffb05814 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1497.434804] env[62820]: INFO nova.compute.manager [req-4f333f9e-bdc3-4704-b160-643b7c277acc req-c80844d9-70f2-46d8-a373-10b53c2daf59 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Neutron deleted interface 917cc76e-f48e-4b85-8eb5-554dffb05814; detaching it from the instance and deleting it from the info cache [ 1497.434957] env[62820]: DEBUG nova.network.neutron [req-4f333f9e-bdc3-4704-b160-643b7c277acc req-c80844d9-70f2-46d8-a373-10b53c2daf59 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.480371] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695603, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.517785] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1497.545831] env[62820]: DEBUG nova.network.neutron [-] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.551833] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1497.552105] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1497.552261] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1497.552530] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1497.552878] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1497.552953] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1497.553202] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1497.553369] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1497.553540] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1497.553706] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1497.553877] env[62820]: DEBUG nova.virt.hardware [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1497.555039] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345ed047-1560-44c7-9601-867d58c4b1c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.566058] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0b99e5-37f4-44b9-bcda-19ecf5e3a11e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.599524] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.602669] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.127s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.602669] env[62820]: DEBUG nova.objects.instance [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lazy-loading 'resources' on Instance uuid 1926c780-faea-40d8-a00b-6ad576349a68 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1497.639294] env[62820]: INFO nova.scheduler.client.report [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted allocations for instance f186854d-3f0a-4512-83b9-2c946247ccbe [ 1497.721830] env[62820]: DEBUG oslo_concurrency.lockutils [None req-da1e52e3-5129-4e99-b52f-7801d1c182b9 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.314s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.748795] 
env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59446} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.749779] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ee188979-e740-4125-a17f-1c02ef9588f1/ee188979-e740-4125-a17f-1c02ef9588f1.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1497.750023] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1497.750297] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85615e27-b970-4c9b-ad1f-3cc81a9b0df0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.760610] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1497.760610] env[62820]: value = "task-1695604" [ 1497.760610] env[62820]: _type = "Task" [ 1497.760610] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.770429] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695604, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.773751] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "56c371a9-983f-4d5f-8abf-0183736c374c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.773989] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "56c371a9-983f-4d5f-8abf-0183736c374c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.775183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "56c371a9-983f-4d5f-8abf-0183736c374c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.775183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "56c371a9-983f-4d5f-8abf-0183736c374c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.775183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "56c371a9-983f-4d5f-8abf-0183736c374c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.783660] env[62820]: INFO nova.compute.manager [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Terminating instance [ 1497.888504] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1497.890414] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353535', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'name': 'volume-ff20d603-5fe3-41ec-814a-a9f0253392bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09ab63ae-fd36-4915-8c59-9d9bc5833288', 'attached_at': '', 'detached_at': '', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'serial': 'ff20d603-5fe3-41ec-814a-a9f0253392bb'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1497.890414] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69bb946-ce35-4f65-833c-fcb2f92ef556 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.911834] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd929adf-35c3-4903-aefc-5b2ab1c3e3ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.941825] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] volume-ff20d603-5fe3-41ec-814a-a9f0253392bb/volume-ff20d603-5fe3-41ec-814a-a9f0253392bb.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.942521] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5112a38-0b92-49ed-afb3-0ab5d2215009 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.957533] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bad8b27c-253b-48dd-8c73-40f77bf588de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.968480] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f089a1-8fa5-43e4-b9ee-6356c32e911c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.979789] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1497.979789] env[62820]: value = "task-1695605" [ 1497.979789] env[62820]: _type = "Task" [ 1497.979789] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.992855] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695603, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.995792] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.006091] env[62820]: DEBUG nova.compute.manager [req-4f333f9e-bdc3-4704-b160-643b7c277acc req-c80844d9-70f2-46d8-a373-10b53c2daf59 service nova] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Detach interface failed, port_id=917cc76e-f48e-4b85-8eb5-554dffb05814, reason: Instance 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1498.055949] env[62820]: INFO nova.compute.manager [-] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Took 1.75 seconds to deallocate network for instance. [ 1498.150507] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c161935d-8cb8-4738-8037-e0a34cf303f3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "f186854d-3f0a-4512-83b9-2c946247ccbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.502s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.226809] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1498.275962] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075441} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.282973] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1498.286057] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbedfba5-4683-40c9-9789-9ae0d4af9887 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.289745] env[62820]: DEBUG nova.compute.manager [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1498.289907] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.291084] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6708d4-5178-45d5-bcce-e0627c3c2ff0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.300577] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.309381] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d4eb347-a202-46c8-acff-b62ffc79cb8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.321278] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] ee188979-e740-4125-a17f-1c02ef9588f1/ee188979-e740-4125-a17f-1c02ef9588f1.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1498.324225] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b98d75d-9366-43e2-8c63-535fb9998e39 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.347788] env[62820]: DEBUG oslo_vmware.api [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1498.347788] env[62820]: value = "task-1695606" [ 1498.347788] env[62820]: _type = "Task" [ 1498.347788] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.349475] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1498.349475] env[62820]: value = "task-1695607" [ 1498.349475] env[62820]: _type = "Task" [ 1498.349475] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.369154] env[62820]: DEBUG oslo_vmware.api [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695606, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.369154] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695607, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.493783] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.497152] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695603, 'name': ReconfigVM_Task, 'duration_secs': 1.469039} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.500208] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Reconfigured VM instance instance-00000030 to attach disk [datastore1] c06e3dcd-b997-497c-865d-5f277695cd7a/c06e3dcd-b997-497c-865d-5f277695cd7a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.500946] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89ca2b4c-6020-44af-b8eb-3c8047210d13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.509437] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1498.509437] env[62820]: value = "task-1695608" [ 1498.509437] env[62820]: _type = "Task" [ 1498.509437] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.521064] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695608, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.562099] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.669009] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359b0038-ba9f-4e52-97d4-b69b9c5820b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.677133] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e431658d-b6b2-4493-9679-e054f432e2fe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.718584] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28727f6-bfaf-4a65-b200-91648b629bae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.727532] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e0c2de-d12f-490f-81de-730232ad88c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.745104] env[62820]: DEBUG nova.compute.provider_tree [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.751871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.864752] env[62820]: DEBUG oslo_vmware.api [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695606, 'name': PowerOffVM_Task, 'duration_secs': 0.236013} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.868671] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.868988] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.869295] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695607, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.869539] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9dfb2deb-81d7-4de5-aac9-53d4f827829a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.000647] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.003264] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1499.003364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1499.003617] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Deleting the datastore file [datastore1] 56c371a9-983f-4d5f-8abf-0183736c374c {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.003834] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c32c9fd5-dff6-44ac-9fab-320083737b0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.015371] env[62820]: DEBUG oslo_vmware.api [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for the task: (returnval){ [ 1499.015371] env[62820]: value = "task-1695610" [ 1499.015371] env[62820]: _type = "Task" [ 1499.015371] env[62820]: } to 
complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.026375] env[62820]: DEBUG oslo_vmware.api [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.029746] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695608, 'name': Rename_Task, 'duration_secs': 0.298035} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.030016] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1499.030269] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f44abd8-3f78-4f40-ba94-e9d8b2576400 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.038531] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1499.038531] env[62820]: value = "task-1695611" [ 1499.038531] env[62820]: _type = "Task" [ 1499.038531] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.048949] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695611, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.080546] env[62820]: DEBUG nova.network.neutron [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Successfully updated port: fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1499.250169] env[62820]: DEBUG nova.scheduler.client.report [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1499.365280] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695607, 'name': ReconfigVM_Task, 'duration_secs': 0.592363} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.366346] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Reconfigured VM instance instance-00000031 to attach disk [datastore1] ee188979-e740-4125-a17f-1c02ef9588f1/ee188979-e740-4125-a17f-1c02ef9588f1.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.367340] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19a84165-4ec6-482d-8034-7e2a7e17b786 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.376688] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1499.376688] env[62820]: value = "task-1695612" [ 1499.376688] env[62820]: _type = "Task" [ 1499.376688] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.392613] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695612, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.466357] env[62820]: DEBUG nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Received event network-vif-plugged-fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1499.466357] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Acquiring lock "9114a81d-86a9-493b-9c07-c4724a0588ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.466357] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.467053] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.467504] env[62820]: DEBUG nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] No waiting events found dispatching network-vif-plugged-fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1499.467839] env[62820]: WARNING nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Received unexpected event network-vif-plugged-fe54bd71-0f0f-4124-aaca-84f035a9773a for instance with vm_state building and task_state spawning. [ 1499.468305] env[62820]: DEBUG nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Received event network-changed-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1499.468608] env[62820]: DEBUG nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Refreshing instance network info cache due to event network-changed-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1499.468959] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Acquiring lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.469298] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Acquired lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.469628] env[62820]: DEBUG nova.network.neutron [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Refreshing network info cache for port cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.497789] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695605, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.530523] env[62820]: DEBUG oslo_vmware.api [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Task: {'id': task-1695610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325955} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.530523] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.530523] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.530523] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.530523] env[62820]: INFO nova.compute.manager [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Took 1.24 seconds to destroy the instance on the hypervisor. 
[ 1499.533792] env[62820]: DEBUG oslo.service.loopingcall [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.533792] env[62820]: DEBUG nova.compute.manager [-] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1499.533792] env[62820]: DEBUG nova.network.neutron [-] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.551483] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695611, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.584438] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "refresh_cache-9114a81d-86a9-493b-9c07-c4724a0588ac" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.585064] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "refresh_cache-9114a81d-86a9-493b-9c07-c4724a0588ac" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.585251] env[62820]: DEBUG nova.network.neutron [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1499.753787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.756683] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.788s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.758488] env[62820]: INFO nova.compute.claims [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1499.779412] env[62820]: INFO 
nova.scheduler.client.report [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Deleted allocations for instance 1926c780-faea-40d8-a00b-6ad576349a68 [ 1499.889395] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695612, 'name': Rename_Task, 'duration_secs': 0.439472} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.890052] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1499.890052] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1228e1c-f3fa-416b-8f31-b0b8f1871cc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.897098] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1499.897098] env[62820]: value = "task-1695613" [ 1499.897098] env[62820]: _type = "Task" [ 1499.897098] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.906661] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.998993] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695605, 'name': ReconfigVM_Task, 'duration_secs': 1.598524} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.998993] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Reconfigured VM instance instance-0000002c to attach disk [datastore1] volume-ff20d603-5fe3-41ec-814a-a9f0253392bb/volume-ff20d603-5fe3-41ec-814a-a9f0253392bb.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1500.007838] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9350e1d7-a335-454e-9f2e-17d1994fb379 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.028930] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1500.028930] env[62820]: value = "task-1695614" [ 1500.028930] env[62820]: _type = "Task" [ 1500.028930] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.035709] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695614, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.049857] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695611, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.137844] env[62820]: DEBUG nova.network.neutron [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1500.291735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0c22563d-db40-42fe-b200-2be6e3a065d9 tempest-ServerPasswordTestJSON-1335208429 tempest-ServerPasswordTestJSON-1335208429-project-member] Lock "1926c780-faea-40d8-a00b-6ad576349a68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.367s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.372826] env[62820]: DEBUG nova.network.neutron [-] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.375347] env[62820]: DEBUG nova.network.neutron [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updated VIF entry in instance network info cache for port cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.375674] env[62820]: DEBUG nova.network.neutron [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updating instance_info_cache with network_info: [{"id": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "address": "fa:16:3e:35:c5:b4", "network": {"id": "25947843-47e0-4ac5-86e5-8b6f798593e0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1022327073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee1a42e497e94154ac770cb4c9d0456a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd6ae25c-d8", "ovs_interfaceid": "cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.396017] env[62820]: DEBUG nova.network.neutron [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Updating instance_info_cache with network_info: [{"id": "fe54bd71-0f0f-4124-aaca-84f035a9773a", "address": "fa:16:3e:7c:8a:c5", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe54bd71-0f", "ovs_interfaceid": "fe54bd71-0f0f-4124-aaca-84f035a9773a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.413832] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695613, 'name': PowerOnVM_Task} progress is 79%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.540977] env[62820]: DEBUG oslo_vmware.api [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695614, 'name': ReconfigVM_Task, 'duration_secs': 0.215576} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.545455] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353535', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'name': 'volume-ff20d603-5fe3-41ec-814a-a9f0253392bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09ab63ae-fd36-4915-8c59-9d9bc5833288', 'attached_at': '', 'detached_at': '', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'serial': 'ff20d603-5fe3-41ec-814a-a9f0253392bb'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1500.554166] env[62820]: DEBUG oslo_vmware.api [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695611, 'name': PowerOnVM_Task, 'duration_secs': 1.231549} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.556083] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1500.556083] env[62820]: INFO nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Took 11.59 seconds to spawn the instance on the hypervisor. [ 1500.556083] env[62820]: DEBUG nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1500.556312] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587f470d-6955-4fce-902b-2879c623ab24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.879055] env[62820]: INFO nova.compute.manager [-] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Took 1.35 seconds to deallocate network for instance. 
[ 1500.881765] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Releasing lock "refresh_cache-b7c9f518-c908-42cc-ba09-59b0f8431f68" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.882013] env[62820]: DEBUG nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Received event network-changed-fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1500.882191] env[62820]: DEBUG nova.compute.manager [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Refreshing instance network info cache due to event network-changed-fe54bd71-0f0f-4124-aaca-84f035a9773a. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1500.882370] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Acquiring lock "refresh_cache-9114a81d-86a9-493b-9c07-c4724a0588ac" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.898719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "refresh_cache-9114a81d-86a9-493b-9c07-c4724a0588ac" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.902024] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Instance network_info: |[{"id": "fe54bd71-0f0f-4124-aaca-84f035a9773a", "address": "fa:16:3e:7c:8a:c5", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe54bd71-0f", "ovs_interfaceid": "fe54bd71-0f0f-4124-aaca-84f035a9773a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1500.902024] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Acquired lock "refresh_cache-9114a81d-86a9-493b-9c07-c4724a0588ac" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.902024] env[62820]: DEBUG nova.network.neutron [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Refreshing network info cache for port fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1500.903630] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:8a:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe54bd71-0f0f-4124-aaca-84f035a9773a', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1500.911815] env[62820]: DEBUG oslo.service.loopingcall [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.920441] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1500.921811] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8106d2f3-a45a-41e5-9c66-a34fcbaa3420 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.943349] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695613, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.948412] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1500.948412] env[62820]: value = "task-1695615" [ 1500.948412] env[62820]: _type = "Task" [ 1500.948412] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.957646] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695615, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.087026] env[62820]: INFO nova.compute.manager [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Took 43.25 seconds to build instance. 
[ 1501.337063] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9124f1-0d62-4d67-aa61-ad694f6700bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.345702] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ece2d5e-c5a8-4c42-abd1-ff61655d32d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.380970] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ba9556-379a-4195-8aec-3372908ecc1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.390321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8767768-e7b9-43d6-a70d-1f25659a5acd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.402215] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.423357] env[62820]: DEBUG nova.compute.provider_tree [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.425836] env[62820]: DEBUG nova.network.neutron [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Updated VIF entry in instance network info cache for port fe54bd71-0f0f-4124-aaca-84f035a9773a. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1501.426185] env[62820]: DEBUG nova.network.neutron [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Updating instance_info_cache with network_info: [{"id": "fe54bd71-0f0f-4124-aaca-84f035a9773a", "address": "fa:16:3e:7c:8a:c5", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe54bd71-0f", "ovs_interfaceid": "fe54bd71-0f0f-4124-aaca-84f035a9773a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.434924] env[62820]: DEBUG oslo_vmware.api [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695613, 'name': PowerOnVM_Task, 'duration_secs': 1.318102} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.434924] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.434924] env[62820]: INFO nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Took 9.79 seconds to spawn the instance on the hypervisor. [ 1501.434924] env[62820]: DEBUG nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1501.436355] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2afcdf-7ac9-42fb-96e9-4a96da427ca9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.458348] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695615, 'name': CreateVM_Task, 'duration_secs': 0.468225} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.458516] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1501.459209] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.459370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.459725] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1501.460799] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffd47f62-b7e1-4494-9fa0-d6f47b096e3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.466081] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1501.466081] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a9d6f9-f295-6cfc-216c-d77c2d016819" [ 1501.466081] env[62820]: _type = "Task" [ 1501.466081] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.473346] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a9d6f9-f295-6cfc-216c-d77c2d016819, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.589200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cba07e-9e18-4fc3-ac1e-feebf20cf322 tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.769s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.613296] env[62820]: DEBUG nova.objects.instance [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'flavor' on Instance uuid 09ab63ae-fd36-4915-8c59-9d9bc5833288 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1501.658271] env[62820]: DEBUG nova.compute.manager [req-244f7712-dcae-410e-9217-8e08b82a62b6 req-1b5f759d-8138-4503-8377-077597bfbcc5 service nova] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Received event network-vif-deleted-cc5ea8f1-ee22-4097-b07f-9a5183bdb994 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1501.903565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.929589] env[62820]: DEBUG nova.scheduler.client.report [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1501.933548] env[62820]: DEBUG oslo_concurrency.lockutils [req-3e7a3d3a-cd2e-4f7c-aa1e-cddd0a6a600f req-7c4c766a-14e3-4995-b41e-d7ece37a3e7a service nova] Releasing lock "refresh_cache-9114a81d-86a9-493b-9c07-c4724a0588ac" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.954420] env[62820]: INFO nova.compute.manager [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Took 42.54 seconds to build instance. [ 1501.984241] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a9d6f9-f295-6cfc-216c-d77c2d016819, 'name': SearchDatastore_Task, 'duration_secs': 0.03075} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.984575] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.984917] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1501.985067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.985194] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.985372] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1501.985640] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5edd776f-504f-4397-8fc1-32934e6eaf34 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.995171] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1501.995362] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1501.996118] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33888e6a-256c-4131-a249-f423eee16320 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.004886] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1502.004886] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52448b8f-6166-eda9-dbce-beedcea22cee" [ 1502.004886] env[62820]: _type = "Task" [ 1502.004886] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.013531] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52448b8f-6166-eda9-dbce-beedcea22cee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.092460] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1502.122284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-34bf4ae5-1b86-4fcb-9d78-7988b4c0be57 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.882s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.122284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.218s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.122284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "09ab63ae-fd36-4915-8c59-9d9bc5833288-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.122284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.122465] env[62820]: DEBUG 
oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.126368] env[62820]: INFO nova.compute.manager [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Terminating instance [ 1502.435483] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.436032] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1502.440050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.129s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.444130] env[62820]: DEBUG nova.objects.instance [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lazy-loading 'resources' on Instance uuid e45cdcfb-f2ce-4798-8e97-1c3f95e61db3 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1502.455648] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0b901cf8-52a7-4a77-a518-48e288acce28 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "ee188979-e740-4125-a17f-1c02ef9588f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.052s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.516763] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52448b8f-6166-eda9-dbce-beedcea22cee, 'name': SearchDatastore_Task, 'duration_secs': 0.009834} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.517690] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2c052cb-63c4-45b7-b321-1eeb34554691 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.523736] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1502.523736] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52389ede-dec0-374c-1582-5bc2fd9676c3" [ 1502.523736] env[62820]: _type = "Task" [ 1502.523736] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.531971] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52389ede-dec0-374c-1582-5bc2fd9676c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.617840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.630587] env[62820]: DEBUG nova.compute.manager [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1502.631280] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1502.631280] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-331ab2bf-62d9-423e-acb2-90865ee38a86 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.641125] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1502.641125] env[62820]: value = "task-1695616" [ 1502.641125] env[62820]: _type = "Task" [ 1502.641125] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.658364] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695616, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.950132] env[62820]: DEBUG nova.compute.utils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1502.951709] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1502.952159] env[62820]: DEBUG nova.network.neutron [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1502.959026] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1503.034325] env[62820]: DEBUG nova.policy [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18246bae0222415c96ec5b252cf5bd6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57b0c64a8704e7aaeba4011866c7a24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1503.045662] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52389ede-dec0-374c-1582-5bc2fd9676c3, 'name': SearchDatastore_Task, 'duration_secs': 0.010519} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.046311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.047351] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9114a81d-86a9-493b-9c07-c4724a0588ac/9114a81d-86a9-493b-9c07-c4724a0588ac.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1503.047351] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc8ae31a-ab99-4c78-91b0-20acdaf28447 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.058914] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1503.058914] env[62820]: value = "task-1695617" [ 1503.058914] env[62820]: _type = "Task" [ 1503.058914] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.073663] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695617, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.074063] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "ee188979-e740-4125-a17f-1c02ef9588f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.074342] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "ee188979-e740-4125-a17f-1c02ef9588f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.074548] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "ee188979-e740-4125-a17f-1c02ef9588f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.074738] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "ee188979-e740-4125-a17f-1c02ef9588f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.074980] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "ee188979-e740-4125-a17f-1c02ef9588f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.078461] env[62820]: INFO nova.compute.manager [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Terminating instance [ 1503.153221] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695616, 'name': PowerOffVM_Task, 'duration_secs': 0.222719} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.154165] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.154165] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1503.154165] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353535', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'name': 'volume-ff20d603-5fe3-41ec-814a-a9f0253392bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09ab63ae-fd36-4915-8c59-9d9bc5833288', 'attached_at': '', 'detached_at': '', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'serial': 'ff20d603-5fe3-41ec-814a-a9f0253392bb'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1503.158213] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a737226a-f948-4e95-b930-80543010b01a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.191203] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09203695-a8d5-44cd-8eba-9dfa3b094b36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.199039] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0ef559-30c7-4ba1-be40-ec0e9bc9f750 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.202706] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "c06e3dcd-b997-497c-865d-5f277695cd7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.202706] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.202706] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "c06e3dcd-b997-497c-865d-5f277695cd7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.202706] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.202706] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.204915] env[62820]: INFO nova.compute.manager [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Terminating instance [ 1503.227747] env[62820]: DEBUG nova.compute.manager [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1503.227747] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1503.229251] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab316f2-926f-4892-87e1-5ed956597d58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.233203] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61900c23-530d-48a1-825c-710c64345aa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.250992] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] The volume has not been displaced from its original location: [datastore1] volume-ff20d603-5fe3-41ec-814a-a9f0253392bb/volume-ff20d603-5fe3-41ec-814a-a9f0253392bb.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1503.257139] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Reconfiguring VM instance instance-0000002c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1503.263292] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38a3bdd0-0f14-4fab-bab0-aef7b40ba627 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.276542] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1503.277449] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4eebed20-f5ce-4287-b32b-d7fb1ca473a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.286807] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1503.286807] env[62820]: value = "task-1695619" [ 1503.286807] env[62820]: _type = "Task" [ 1503.286807] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.288356] env[62820]: DEBUG oslo_vmware.api [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the task: (returnval){ [ 1503.288356] env[62820]: value = "task-1695618" [ 1503.288356] env[62820]: _type = "Task" [ 1503.288356] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.309089] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695619, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.312238] env[62820]: DEBUG oslo_vmware.api [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.463930] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1503.489363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.546481] env[62820]: DEBUG nova.network.neutron [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Successfully created port: b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1503.570821] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695617, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.582317] env[62820]: DEBUG nova.compute.manager [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1503.582317] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1503.584153] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0c3ad2-a32f-4206-b56a-6d4526d852f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.592083] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa40d8a-1260-47a9-8a4e-c62994318bcf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.607704] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad4b736-76ae-4a8f-8d56-f984dd7be931 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.610375] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1503.610696] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bcb6779-ba19-4a1b-a059-4a915d68d866 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.618786] env[62820]: DEBUG oslo_vmware.api [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 
tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1503.618786] env[62820]: value = "task-1695620" [ 1503.618786] env[62820]: _type = "Task" [ 1503.618786] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.653027] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1eeb3a-36bc-4f5c-8a6d-f9ac93b4772c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.662119] env[62820]: DEBUG oslo_vmware.api [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.665742] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6e71cd-aa2e-4b51-ae14-e28f47f8c46b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.680822] env[62820]: DEBUG nova.compute.provider_tree [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.805269] env[62820]: DEBUG oslo_vmware.api [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695618, 'name': PowerOffVM_Task, 'duration_secs': 0.248061} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.810098] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.810293] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1503.810672] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695619, 'name': ReconfigVM_Task, 'duration_secs': 0.284509} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.810916] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-003eb70d-85b4-40e2-b3f9-20d18ef05121 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.813028] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Reconfigured VM instance instance-0000002c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1503.817829] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5786965d-f4c8-4f6b-987c-6c1dcb3157c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.835747] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1503.835747] env[62820]: value = "task-1695622" [ 1503.835747] env[62820]: _type = "Task" [ 1503.835747] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.846523] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695622, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.913102] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1503.913102] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.913102] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Deleting the datastore file [datastore1] c06e3dcd-b997-497c-865d-5f277695cd7a {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1503.913102] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35180d42-aabe-43f2-960c-863711cf7b73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.922022] env[62820]: DEBUG oslo_vmware.api [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for the 
task: (returnval){ [ 1503.922022] env[62820]: value = "task-1695623" [ 1503.922022] env[62820]: _type = "Task" [ 1503.922022] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.930458] env[62820]: DEBUG oslo_vmware.api [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695623, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.073385] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570867} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.073965] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9114a81d-86a9-493b-9c07-c4724a0588ac/9114a81d-86a9-493b-9c07-c4724a0588ac.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1504.074394] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1504.074882] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7833af6-e004-434c-9ec1-179216cd4eb4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.085502] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1504.085502] env[62820]: value = "task-1695624" [ 1504.085502] env[62820]: _type = "Task" [ 1504.085502] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.102539] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.160018] env[62820]: DEBUG oslo_vmware.api [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695620, 'name': PowerOffVM_Task, 'duration_secs': 0.219734} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.160018] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1504.160018] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1504.162085] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc1d0154-263a-407b-9795-6a211bc55ef5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.184068] env[62820]: DEBUG nova.scheduler.client.report [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1504.348839] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695622, 'name': ReconfigVM_Task, 'duration_secs': 0.203497} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.349294] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353535', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'name': 'volume-ff20d603-5fe3-41ec-814a-a9f0253392bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09ab63ae-fd36-4915-8c59-9d9bc5833288', 'attached_at': '', 'detached_at': '', 'volume_id': 'ff20d603-5fe3-41ec-814a-a9f0253392bb', 'serial': 'ff20d603-5fe3-41ec-814a-a9f0253392bb'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1504.349735] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1504.351038] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab5211e-009e-4515-b4d6-b9721ba94d9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.361376] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1504.361749] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adb33261-5255-4679-8e56-6fe74720a501 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.377572] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1504.377816] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1504.377964] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Deleting the datastore file [datastore1] ee188979-e740-4125-a17f-1c02ef9588f1 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1504.378261] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e0c9ee5-1d38-48ab-8587-9aa47f7c3c53 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.384778] env[62820]: DEBUG oslo_vmware.api [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for the task: (returnval){ [ 1504.384778] env[62820]: value = "task-1695627" [ 1504.384778] env[62820]: _type = "Task" [ 1504.384778] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.394735] env[62820]: DEBUG oslo_vmware.api [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695627, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.431704] env[62820]: DEBUG oslo_vmware.api [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Task: {'id': task-1695623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200499} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.431704] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1504.431937] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1504.432083] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1504.432342] env[62820]: INFO nova.compute.manager [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1504.432681] env[62820]: DEBUG oslo.service.loopingcall [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.432956] env[62820]: DEBUG nova.compute.manager [-] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1504.433090] env[62820]: DEBUG nova.network.neutron [-] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1504.477097] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1504.494285] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1504.494575] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1504.494786] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleting the datastore file [datastore1] 09ab63ae-fd36-4915-8c59-9d9bc5833288 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1504.495332] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e453adf7-fd1c-41a9-93e3-ab304dc922c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.503615] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1504.503615] env[62820]: value = "task-1695628" [ 1504.503615] env[62820]: _type = "Task" [ 1504.503615] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.513050] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695628, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.539205] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1504.539205] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1504.539205] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1504.539205] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1504.539205] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1504.541105] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1504.541381] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1504.541559] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1504.541737] env[62820]: DEBUG nova.virt.hardware [None 
req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1504.541977] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1504.542194] env[62820]: DEBUG nova.virt.hardware [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1504.543237] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba476f3-c144-4bdf-85be-afef4f58d85d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.555316] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ceb7915-f172-4113-b4af-3fac5bc836ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.596749] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.355481} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.597103] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1504.597933] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e740bbbb-87a9-47d5-a3fd-25978a92895c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.624280] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 9114a81d-86a9-493b-9c07-c4724a0588ac/9114a81d-86a9-493b-9c07-c4724a0588ac.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1504.624688] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-063a2d30-fb11-4adb-973b-e473c188292a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.648027] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1504.648027] env[62820]: value = "task-1695629" [ 1504.648027] env[62820]: _type = "Task" [ 1504.648027] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.657440] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695629, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.686986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.687273] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.692032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.695050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.808s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.695302] env[62820]: DEBUG nova.objects.instance [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lazy-loading 'resources' on Instance uuid 706d42cd-53d9-4976-bc67-98816a40fff4 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.713798] env[62820]: INFO nova.scheduler.client.report [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleted allocations for instance e45cdcfb-f2ce-4798-8e97-1c3f95e61db3 [ 1504.808767] env[62820]: DEBUG nova.compute.manager [req-ecaa7727-0e00-4662-a453-f1185c4383bd req-0c5d534b-f052-43b7-a0e2-73287c9597f8 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Received event network-vif-deleted-b1412dd4-3e38-4763-a38d-3ebff9f8f873 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1504.808988] env[62820]: INFO nova.compute.manager [req-ecaa7727-0e00-4662-a453-f1185c4383bd req-0c5d534b-f052-43b7-a0e2-73287c9597f8 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Neutron deleted interface b1412dd4-3e38-4763-a38d-3ebff9f8f873; detaching it from the instance and deleting it from the info cache [ 1504.809220] env[62820]: DEBUG nova.network.neutron [req-ecaa7727-0e00-4662-a453-f1185c4383bd req-0c5d534b-f052-43b7-a0e2-73287c9597f8 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1504.895709] env[62820]: DEBUG oslo_vmware.api [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Task: {'id': task-1695627, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197009} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.895968] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1504.896199] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1504.896380] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1504.896619] env[62820]: INFO nova.compute.manager [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1504.896864] env[62820]: DEBUG oslo.service.loopingcall [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.897068] env[62820]: DEBUG nova.compute.manager [-] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1504.897168] env[62820]: DEBUG nova.network.neutron [-] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1505.015337] env[62820]: DEBUG oslo_vmware.api [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187208} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.015828] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1505.016240] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1505.016513] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1505.016712] env[62820]: INFO nova.compute.manager [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Took 2.39 seconds to destroy the instance on the hypervisor. [ 1505.016976] env[62820]: DEBUG oslo.service.loopingcall [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1505.017187] env[62820]: DEBUG nova.compute.manager [-] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1505.017470] env[62820]: DEBUG nova.network.neutron [-] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1505.160133] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695629, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.224249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a23538bd-f4d4-4be8-bd8a-0af1e2afcba7 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "e45cdcfb-f2ce-4798-8e97-1c3f95e61db3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.738s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.280629] env[62820]: DEBUG nova.network.neutron [-] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.315173] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3860786-e967-4505-90d9-1739b74996e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.326322] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1aa4ba1-d14c-467a-97eb-edf7a74203c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.371140] env[62820]: DEBUG nova.compute.manager [req-ecaa7727-0e00-4662-a453-f1185c4383bd req-0c5d534b-f052-43b7-a0e2-73287c9597f8 service nova] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Detach interface failed, port_id=b1412dd4-3e38-4763-a38d-3ebff9f8f873, reason: Instance c06e3dcd-b997-497c-865d-5f277695cd7a could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1505.630627] env[62820]: DEBUG nova.network.neutron [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Successfully updated port: b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1505.659993] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695629, 'name': ReconfigVM_Task, 'duration_secs': 0.629717} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.662636] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 9114a81d-86a9-493b-9c07-c4724a0588ac/9114a81d-86a9-493b-9c07-c4724a0588ac.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1505.664027] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fddeac3-bbc6-4378-a5f8-af51fd700c1a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.673822] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1505.673822] env[62820]: value = "task-1695630" [ 1505.673822] env[62820]: _type = "Task" [ 1505.673822] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.678559] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee64751c-53c9-4412-afc9-e7f6ade7cf62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.686930] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695630, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.689579] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0149df-9261-4adf-8509-78ca16f8fa17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.722845] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e91c03-8511-444f-ab06-d0b3504c314f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.732296] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068a338e-0c80-4742-8e62-f8fd583f5b9d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.749255] env[62820]: DEBUG nova.compute.provider_tree [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.786405] env[62820]: INFO nova.compute.manager [-] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Took 1.35 seconds to deallocate network for instance. 
[ 1505.927946] env[62820]: DEBUG nova.network.neutron [-] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.047771] env[62820]: DEBUG nova.network.neutron [-] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.138237] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.138237] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.138237] env[62820]: DEBUG nova.network.neutron [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1506.185095] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695630, 'name': Rename_Task, 'duration_secs': 0.230644} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.185358] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1506.185605] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2091febb-c0ad-4863-b864-cc7d1b44043b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.194116] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1506.194116] env[62820]: value = "task-1695631" [ 1506.194116] env[62820]: _type = "Task" [ 1506.194116] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.202630] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695631, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.252425] env[62820]: DEBUG nova.scheduler.client.report [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1506.292729] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.432347] env[62820]: INFO nova.compute.manager [-] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Took 1.53 seconds to deallocate network for instance. [ 1506.550703] env[62820]: INFO nova.compute.manager [-] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Took 1.53 seconds to deallocate network for instance. [ 1506.678394] env[62820]: DEBUG nova.network.neutron [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1506.706909] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695631, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.711540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.711796] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.758959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.760421] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.740s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.762065] env[62820]: INFO nova.compute.claims [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1506.782595] env[62820]: INFO nova.scheduler.client.report [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted allocations for instance 706d42cd-53d9-4976-bc67-98816a40fff4 [ 1506.839263] env[62820]: DEBUG nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Received event network-vif-plugged-b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1506.839499] env[62820]: DEBUG oslo_concurrency.lockutils [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.839712] env[62820]: DEBUG oslo_concurrency.lockutils [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.839883] env[62820]: DEBUG oslo_concurrency.lockutils [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.840293] env[62820]: DEBUG nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] No waiting events found dispatching network-vif-plugged-b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1506.840523] env[62820]: WARNING nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Received unexpected event network-vif-plugged-b234cdf0-fffd-452d-a277-6df15c22fa06 for instance with vm_state building and task_state spawning. [ 1506.840700] env[62820]: DEBUG nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Received event network-vif-deleted-0560ed3a-b2f5-4e20-bedc-db38149bd216 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1506.840872] env[62820]: DEBUG nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Received event network-vif-deleted-d056da46-0da8-4d3b-b8a9-0255f08e1a3b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1506.841054] env[62820]: DEBUG nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Received event network-changed-b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1506.841212] env[62820]: DEBUG nova.compute.manager [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Refreshing instance network info cache due to event network-changed-b234cdf0-fffd-452d-a277-6df15c22fa06. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1506.841377] env[62820]: DEBUG oslo_concurrency.lockutils [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.865469] env[62820]: DEBUG nova.network.neutron [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.938763] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.101171] env[62820]: INFO nova.compute.manager [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Took 0.55 seconds to detach 1 volumes for instance. [ 1507.205213] env[62820]: DEBUG oslo_vmware.api [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695631, 'name': PowerOnVM_Task, 'duration_secs': 0.868611} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.205485] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1507.205671] env[62820]: INFO nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1507.205851] env[62820]: DEBUG nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1507.206647] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de1723f-2660-413b-a3fc-9d0c44e4947d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.290993] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3ee46388-4ece-43c9-a9fb-7fdd16310a25 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "706d42cd-53d9-4976-bc67-98816a40fff4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.535s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.368331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.368658] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Instance network_info: |[{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1507.368963] env[62820]: DEBUG oslo_concurrency.lockutils [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.369169] env[62820]: DEBUG nova.network.neutron [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Refreshing network info cache for port b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1507.370254] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:6e:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b234cdf0-fffd-452d-a277-6df15c22fa06', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1507.377626] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating folder: Project (c57b0c64a8704e7aaeba4011866c7a24). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1507.380747] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efc868cd-1bb7-4977-87e9-1a7273eed7f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.393931] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created folder: Project (c57b0c64a8704e7aaeba4011866c7a24) in parent group-v353379. [ 1507.394115] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating folder: Instances. Parent ref: group-v353540. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1507.394357] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14f6e175-8365-484a-aa94-8e7f8ec4e03f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.404162] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created folder: Instances in parent group-v353540. [ 1507.404400] env[62820]: DEBUG oslo.service.loopingcall [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.404583] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1507.404780] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea37f7be-1217-42cd-8597-82d3a9190877 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.425849] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1507.425849] env[62820]: value = "task-1695634" [ 1507.425849] env[62820]: _type = "Task" [ 1507.425849] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.433443] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695634, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.555932] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "58a26c98-cbf9-491f-8d2c-20281c3d7771" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.556231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.556549] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "58a26c98-cbf9-491f-8d2c-20281c3d7771-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.556764] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.557062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.559633] env[62820]: INFO nova.compute.manager [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 
tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Terminating instance [ 1507.607263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.608995] env[62820]: DEBUG nova.network.neutron [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updated VIF entry in instance network info cache for port b234cdf0-fffd-452d-a277-6df15c22fa06. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1507.609337] env[62820]: DEBUG nova.network.neutron [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.726403] env[62820]: INFO nova.compute.manager [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Took 40.53 seconds to build instance. [ 1507.942308] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695634, 'name': CreateVM_Task, 'duration_secs': 0.383228} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.944892] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1507.946114] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.946114] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.946212] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1507.949898] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91ad98a5-75c0-4e71-9c4c-c1aa73932920 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.952243] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1507.952243] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cc3d8d-60a7-a857-c9be-72d47143b877" [ 1507.952243] env[62820]: _type = "Task" [ 1507.952243] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.962827] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cc3d8d-60a7-a857-c9be-72d47143b877, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.064972] env[62820]: DEBUG nova.compute.manager [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1508.065228] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1508.066092] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb85439-d118-4174-9f9c-5f74ccfeea96 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.074366] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1508.074621] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b22605ec-8b8a-47ba-bc8e-e61388de6c46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.083585] env[62820]: DEBUG oslo_vmware.api [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1508.083585] env[62820]: value = "task-1695635" [ 1508.083585] env[62820]: _type = "Task" [ 1508.083585] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.094550] env[62820]: DEBUG oslo_vmware.api [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695635, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.111446] env[62820]: DEBUG oslo_concurrency.lockutils [req-8c2412be-3db0-4561-b3cc-0f24f009758d req-d925bb87-a209-477e-97ee-4a1b89c29975 service nova] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.122897] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a293423-3d73-4aee-aaa8-40de356d75f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.131761] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b21d5d-6f71-492e-b4a8-ce7d5f39f6a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.163851] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d76e1e5-4223-4a50-b385-3ac3c232fff6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.172489] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b182129-3f77-4c55-88e6-47f072c43a87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.187225] env[62820]: DEBUG nova.compute.provider_tree [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.228201] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa985a22-bb0a-4df8-bae5-890945172b28 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.046s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.463543] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cc3d8d-60a7-a857-c9be-72d47143b877, 'name': SearchDatastore_Task, 'duration_secs': 0.011481} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.463860] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.464109] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1508.464344] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.464528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.464718] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1508.464983] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a37a2fc0-8dde-4a38-b484-07f881b2bfcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.474639] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1508.474843] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1508.475573] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d78f8f15-7c79-4ced-8823-cb25d7d75f46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.481460] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1508.481460] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523f927d-4d08-ef56-9325-a027b93deb09" [ 1508.481460] env[62820]: _type = "Task" [ 1508.481460] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.489198] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523f927d-4d08-ef56-9325-a027b93deb09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.595499] env[62820]: DEBUG oslo_vmware.api [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695635, 'name': PowerOffVM_Task, 'duration_secs': 0.194249} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.595783] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1508.595944] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1508.596219] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a1d9316-1693-4570-9bb6-7942db294cd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.690301] env[62820]: DEBUG nova.scheduler.client.report [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1508.731744] env[62820]: DEBUG nova.compute.manager [None 
req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1508.798391] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1508.798796] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1508.799070] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleting the datastore file [datastore1] 58a26c98-cbf9-491f-8d2c-20281c3d7771 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1508.799364] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a78dda33-396b-49ea-a1fc-2f2f12a23b6a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.806638] env[62820]: DEBUG oslo_vmware.api [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1508.806638] env[62820]: value = "task-1695637" [ 1508.806638] env[62820]: _type = "Task" [ 1508.806638] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.815509] env[62820]: DEBUG oslo_vmware.api [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.991902] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523f927d-4d08-ef56-9325-a027b93deb09, 'name': SearchDatastore_Task, 'duration_secs': 0.010469} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.992948] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebea826b-dfea-43a5-a083-5f3014360dac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.998649] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1508.998649] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52df1221-1d52-2632-239f-13cb846160dc" [ 1508.998649] env[62820]: _type = "Task" [ 1508.998649] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.006434] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52df1221-1d52-2632-239f-13cb846160dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.195557] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.196230] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1509.198998] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.883s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.199204] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.199362] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1509.199669] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.492s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.201137] env[62820]: INFO nova.compute.claims [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1509.204734] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c39860-d0a8-4974-9ed2-c90d1f24aa15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.214851] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f40571-d04b-4a2a-a74d-64ce9e3f0cf3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.231788] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f55757e-64f9-4c56-9816-85028569c8ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.243366] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74257bcf-f458-420f-aca0-57e42a374bb4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.279938] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179289MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1509.280148] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.281513] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.299591] env[62820]: DEBUG nova.compute.manager [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1509.300535] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fcabd9-bb85-4891-9890-5409c5d2ac41 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.320115] env[62820]: DEBUG oslo_vmware.api [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161382} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.320404] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1509.320604] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1509.320785] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1509.320960] env[62820]: INFO nova.compute.manager [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1509.321243] env[62820]: DEBUG oslo.service.loopingcall [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.321448] env[62820]: DEBUG nova.compute.manager [-] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1509.321561] env[62820]: DEBUG nova.network.neutron [-] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1509.521182] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52df1221-1d52-2632-239f-13cb846160dc, 'name': SearchDatastore_Task, 'duration_secs': 0.0098} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.523324] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.523324] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8/b89d32f8-0675-4b0c-977e-b7900e62bdd8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1509.523324] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-716ac290-0e78-45d8-ac55-c453f4057ef6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.535808] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1509.535808] env[62820]: value = "task-1695638" [ 1509.535808] env[62820]: _type = "Task" [ 1509.535808] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.549169] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695638, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.706054] env[62820]: DEBUG nova.compute.utils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1509.707808] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1509.707991] env[62820]: DEBUG nova.network.neutron [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1509.730048] env[62820]: DEBUG nova.compute.manager [req-2c6ba960-db26-4748-bf2f-6d60afe7082a req-806ccba2-0de8-44e0-94df-bbe84eaf9eba service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Received event network-vif-deleted-66676266-bbc8-4add-aeb0-77fc22873d87 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1509.730692] env[62820]: INFO nova.compute.manager [req-2c6ba960-db26-4748-bf2f-6d60afe7082a req-806ccba2-0de8-44e0-94df-bbe84eaf9eba service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Neutron deleted interface 66676266-bbc8-4add-aeb0-77fc22873d87; detaching it from the instance and deleting it from the info cache [ 1509.731198] env[62820]: DEBUG nova.network.neutron [req-2c6ba960-db26-4748-bf2f-6d60afe7082a req-806ccba2-0de8-44e0-94df-bbe84eaf9eba service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.756935] env[62820]: DEBUG nova.policy [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6f2dd7e4b5a4484a9aef4d51061c7d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8561ded662f04b3eb420b60ca3345771', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1509.815741] env[62820]: INFO nova.compute.manager [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] instance snapshotting [ 1509.821837] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20c2808-c539-4921-b85b-f867a759bb1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.851530] env[62820]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569ca97b-107f-4e31-81f1-4b1702fddeea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.048085] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695638, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.130694] env[62820]: DEBUG nova.network.neutron [-] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.183608] env[62820]: DEBUG nova.network.neutron [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Successfully created port: cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1510.211180] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1510.235839] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cac32db-4d97-434e-9ad3-0f52dcbab937 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.250263] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478e8da6-0f21-4ab7-9498-6fae97fcb28e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.294684] env[62820]: DEBUG nova.compute.manager [req-2c6ba960-db26-4748-bf2f-6d60afe7082a req-806ccba2-0de8-44e0-94df-bbe84eaf9eba service nova] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Detach interface failed, port_id=66676266-bbc8-4add-aeb0-77fc22873d87, reason: Instance 58a26c98-cbf9-491f-8d2c-20281c3d7771 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1510.365972] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1510.366320] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e60eaaec-c999-4913-89ac-9f9108c15442 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.374864] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1510.374864] env[62820]: value = "task-1695639" [ 1510.374864] env[62820]: _type = "Task" [ 1510.374864] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.388983] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695639, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.552199] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695638, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652772} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.552474] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8/b89d32f8-0675-4b0c-977e-b7900e62bdd8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1510.552690] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1510.552951] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c8c98f5-e7bf-41b4-9f81-b2a0b1915555 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.571687] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1510.571687] env[62820]: value = "task-1695640" [ 1510.571687] env[62820]: _type = "Task" [ 1510.571687] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.582095] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695640, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.633522] env[62820]: INFO nova.compute.manager [-] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Took 1.31 seconds to deallocate network for instance. 
[ 1510.717511] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e195e3a-95a2-4191-8877-a75af5e6363a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.736673] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3344a67c-84eb-4987-ad99-62a74b85c0e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.796161] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592ad157-c384-411b-a36c-45095748665a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.808745] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975fa1e4-32b6-41fb-a46e-eb925195fa14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.827929] env[62820]: DEBUG nova.compute.provider_tree [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.886619] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695639, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.082546] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695640, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.143381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.243909] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1511.268533] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1511.268835] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1511.269053] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.269280] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1511.269466] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.269647] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1511.269898] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1511.270106] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1511.270321] env[62820]: DEBUG 
nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1511.270544] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1511.270726] env[62820]: DEBUG nova.virt.hardware [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1511.271692] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca6aa01-969a-41e9-8a4d-01a73e69fd36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.281205] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9bcee2-6e55-4a60-91b8-262993c3d68a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.331238] env[62820]: DEBUG nova.scheduler.client.report [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1511.386754] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695639, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.582838] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695640, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.800652] env[62820]: DEBUG nova.compute.manager [req-88a550ac-f7ca-4e2e-bfce-c8c5dff5d04d req-04e35493-d908-4a01-b3e4-38978bc06f33 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Received event network-vif-plugged-cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1511.800882] env[62820]: DEBUG oslo_concurrency.lockutils [req-88a550ac-f7ca-4e2e-bfce-c8c5dff5d04d req-04e35493-d908-4a01-b3e4-38978bc06f33 service nova] Acquiring lock "7e4596bf-a8b0-4502-b80b-da372d1fba06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.801107] env[62820]: DEBUG oslo_concurrency.lockutils [req-88a550ac-f7ca-4e2e-bfce-c8c5dff5d04d req-04e35493-d908-4a01-b3e4-38978bc06f33 service nova] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.801383] env[62820]: DEBUG oslo_concurrency.lockutils [req-88a550ac-f7ca-4e2e-bfce-c8c5dff5d04d req-04e35493-d908-4a01-b3e4-38978bc06f33 service nova] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.801515] env[62820]: DEBUG nova.compute.manager [req-88a550ac-f7ca-4e2e-bfce-c8c5dff5d04d req-04e35493-d908-4a01-b3e4-38978bc06f33 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] No waiting events found dispatching network-vif-plugged-cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1511.801611] env[62820]: WARNING nova.compute.manager [req-88a550ac-f7ca-4e2e-bfce-c8c5dff5d04d req-04e35493-d908-4a01-b3e4-38978bc06f33 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Received unexpected event network-vif-plugged-cf61248a-4d10-4c98-9e28-b142c204a810 for instance with vm_state building and task_state spawning. [ 1511.815924] env[62820]: DEBUG nova.network.neutron [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Successfully updated port: cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1511.836111] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.636s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.836558] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1511.839416] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.209s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.839889] env[62820]: DEBUG nova.objects.instance [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lazy-loading 'resources' on Instance uuid 498236b7-3688-4ab1-a604-a9737ba058e8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1511.887263] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695639, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.083199] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695640, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.318756] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "refresh_cache-7e4596bf-a8b0-4502-b80b-da372d1fba06" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.319031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired lock "refresh_cache-7e4596bf-a8b0-4502-b80b-da372d1fba06" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.319303] env[62820]: DEBUG nova.network.neutron [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.344951] env[62820]: DEBUG nova.compute.utils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1512.346710] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1512.346881] env[62820]: DEBUG nova.network.neutron [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1512.389787] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695639, 'name': CreateSnapshot_Task, 'duration_secs': 1.870488} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.390075] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1512.390819] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe3fb2-3e29-4e1b-b176-f9db25105da1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.404365] env[62820]: DEBUG nova.policy [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3315ddc593dd40d3bc97ab71be7c802e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '573f482dc303432aba8d20980da241ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1512.588399] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695640, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.580995} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.588753] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1512.589489] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2bbe55-dd4c-47e2-a32c-c71678502862 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.613198] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8/b89d32f8-0675-4b0c-977e-b7900e62bdd8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1512.613700] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a391ca1c-d8f7-4a5a-8176-62e89c5b99c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.640473] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1512.640473] env[62820]: value = "task-1695641" [ 1512.640473] env[62820]: _type = "Task" [ 1512.640473] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.661278] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695641, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.759526] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4dfa92-0f9b-499d-b820-c49b6786c927 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.767710] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79fc351-1e3f-4f94-bf88-207a96b52ba8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.808666] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf91ecc-a47a-4c55-a488-fd8c859b154d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.818824] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5f9f73-b313-42a9-ba0e-fe847e643cbb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.840107] env[62820]: DEBUG nova.compute.provider_tree [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1512.850202] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1512.870607] env[62820]: DEBUG nova.network.neutron [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Successfully created port: fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1512.891756] env[62820]: DEBUG nova.network.neutron [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1512.915501] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1512.918939] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cd666901-988e-47b9-b06b-867492335a90 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.930170] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1512.930170] env[62820]: value = "task-1695642" [ 1512.930170] env[62820]: _type = "Task" [ 1512.930170] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.938624] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695642, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.151670] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695641, 'name': ReconfigVM_Task, 'duration_secs': 0.284593} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.152078] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Reconfigured VM instance instance-00000033 to attach disk [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8/b89d32f8-0675-4b0c-977e-b7900e62bdd8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1513.152792] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12001abf-fc09-4f47-b1e7-50e236fb688f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.161062] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1513.161062] env[62820]: value = "task-1695643" [ 1513.161062] env[62820]: _type = "Task" [ 1513.161062] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.174991] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695643, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.344125] env[62820]: DEBUG nova.scheduler.client.report [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1513.349705] env[62820]: DEBUG nova.network.neutron [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Updating instance_info_cache with network_info: [{"id": "cf61248a-4d10-4c98-9e28-b142c204a810", "address": "fa:16:3e:d7:1f:b6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf61248a-4d", "ovs_interfaceid": "cf61248a-4d10-4c98-9e28-b142c204a810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.441635] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695642, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.672412] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695643, 'name': Rename_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.842267] env[62820]: DEBUG nova.compute.manager [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Received event network-changed-cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1513.842745] env[62820]: DEBUG nova.compute.manager [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Refreshing instance network info cache due to event network-changed-cf61248a-4d10-4c98-9e28-b142c204a810. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1513.842745] env[62820]: DEBUG oslo_concurrency.lockutils [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] Acquiring lock "refresh_cache-7e4596bf-a8b0-4502-b80b-da372d1fba06" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1513.852230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Releasing lock "refresh_cache-7e4596bf-a8b0-4502-b80b-da372d1fba06" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.852539] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Instance network_info: |[{"id": "cf61248a-4d10-4c98-9e28-b142c204a810", "address": "fa:16:3e:d7:1f:b6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf61248a-4d", "ovs_interfaceid": "cf61248a-4d10-4c98-9e28-b142c204a810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1513.853253] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1513.855685] env[62820]: DEBUG oslo_concurrency.lockutils [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] Acquired lock "refresh_cache-7e4596bf-a8b0-4502-b80b-da372d1fba06" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.855788] env[62820]: DEBUG nova.network.neutron [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Refreshing network info cache for port cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1513.857129] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:1f:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf61248a-4d10-4c98-9e28-b142c204a810', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1513.864372] env[62820]: DEBUG oslo.service.loopingcall [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1513.864602] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.102s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.864815] env[62820]: DEBUG nova.objects.instance [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'resources' on Instance uuid bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1513.871502] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1513.873825] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1513.875028] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-853f16e8-407a-43ac-9e4e-d8fa5ba75f47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.897028] env[62820]: INFO nova.scheduler.client.report [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Deleted allocations for instance 498236b7-3688-4ab1-a604-a9737ba058e8 [ 1513.911162] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1513.911317] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1513.911479] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.911670] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1513.911827] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.911973] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1513.912219] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1513.912800] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1513.913042] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1513.913229] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1513.913446] env[62820]: DEBUG nova.virt.hardware [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1513.914838] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee13103-1c5a-4993-a908-90b12d7c4582 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.919449] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1513.919449] env[62820]: value = "task-1695644" [ 1513.919449] env[62820]: _type = "Task" [ 1513.919449] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.936427] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10f0bb1-e000-425d-9cbb-52b003fcfc3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.948387] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695644, 'name': CreateVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.967222] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695642, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.162662] env[62820]: DEBUG nova.network.neutron [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Updated VIF entry in instance network info cache for port cf61248a-4d10-4c98-9e28-b142c204a810. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1514.163212] env[62820]: DEBUG nova.network.neutron [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Updating instance_info_cache with network_info: [{"id": "cf61248a-4d10-4c98-9e28-b142c204a810", "address": "fa:16:3e:d7:1f:b6", "network": {"id": "cd549f33-30d7-42c9-93f3-fef740c084f5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-998216747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561ded662f04b3eb420b60ca3345771", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf61248a-4d", "ovs_interfaceid": "cf61248a-4d10-4c98-9e28-b142c204a810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.173713] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695643, 'name': Rename_Task, 'duration_secs': 0.865796} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.174780] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.175048] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29518161-6950-42cb-97c9-e9fe90604847 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.183217] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1514.183217] env[62820]: value = "task-1695645" [ 1514.183217] env[62820]: _type = "Task" [ 1514.183217] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.194291] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.408419] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d133679f-af56-47bb-9de3-c69f29d9a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.008s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.409379] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 35.598s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.409606] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.409812] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.409980] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.414096] env[62820]: INFO nova.compute.manager [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Terminating instance [ 1514.434080] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695644, 'name': CreateVM_Task, 'duration_secs': 0.409481} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.437381] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1514.440643] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.440835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.441197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1514.442610] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d54bceb1-f937-4901-8263-abe032d844da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.447630] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695642, 'name': CloneVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.449034] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1514.449034] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cb0174-8e16-c4e5-dafe-0dde7d1aff46" [ 1514.449034] env[62820]: _type = "Task" [ 1514.449034] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.461907] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cb0174-8e16-c4e5-dafe-0dde7d1aff46, 'name': SearchDatastore_Task, 'duration_secs': 0.011943} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.462159] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.462399] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1514.462627] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.462774] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.462950] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1514.463222] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a949200f-78ac-458d-b71d-8892a0ba4645 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.472328] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1514.472502] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1514.473183] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73acde07-86c7-4c52-a5f0-8b602accc78f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.479151] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1514.479151] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f68ac1-4e74-ba27-353f-614867c91f93" [ 1514.479151] env[62820]: _type = "Task" [ 1514.479151] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.489734] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f68ac1-4e74-ba27-353f-614867c91f93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.669270] env[62820]: DEBUG oslo_concurrency.lockutils [req-982f3493-cf24-487b-9360-3b01b0783dce req-073f8413-80da-45c5-9d76-d91108eba2b8 service nova] Releasing lock "refresh_cache-7e4596bf-a8b0-4502-b80b-da372d1fba06" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.697340] env[62820]: DEBUG oslo_vmware.api [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1695645, 'name': PowerOnVM_Task, 'duration_secs': 0.498002} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.697340] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1514.697340] env[62820]: INFO nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Took 10.22 seconds to spawn the instance on the hypervisor. 
[ 1514.697495] env[62820]: DEBUG nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1514.698351] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6343cb-1a6f-481e-b5c6-9714df889daf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.778417] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065ba56e-4acb-433a-9c08-0478b76a9934 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.789477] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cf10a0-99e4-47f6-bcb0-ead6877dd7ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.822298] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3861220a-831f-4c33-8a17-ff8e53747049 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.832869] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8a635b-136c-4ea1-a075-184de807038b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.846887] env[62820]: DEBUG nova.compute.provider_tree [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1514.870388] env[62820]: DEBUG nova.network.neutron [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Successfully updated port: fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1514.917849] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.917849] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquired lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.918038] env[62820]: DEBUG nova.network.neutron [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Building 
network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1514.944385] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695642, 'name': CloneVM_Task, 'duration_secs': 1.527049} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.944687] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Created linked-clone VM from snapshot [ 1514.945793] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc14175-b3ca-4369-8e51-2dee438222e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.954222] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Uploading image fb30f87e-9d83-41a3-a17f-e897695c418d {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1514.973200] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1514.973200] env[62820]: value = "vm-353544" [ 1514.973200] env[62820]: _type = "VirtualMachine" [ 1514.973200] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1514.973476] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-caf15573-37cd-4a34-bc09-757a2d3d9651 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.982550] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease: (returnval){ [ 1514.982550] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cad7d8-bfd6-9563-5f0a-08855c557e9f" [ 1514.982550] env[62820]: _type = "HttpNfcLease" [ 1514.982550] env[62820]: } obtained for exporting VM: (result){ [ 1514.982550] env[62820]: value = "vm-353544" [ 1514.982550] env[62820]: _type = "VirtualMachine" [ 1514.982550] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1514.982766] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the lease: (returnval){ [ 1514.982766] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cad7d8-bfd6-9563-5f0a-08855c557e9f" [ 1514.982766] env[62820]: _type = "HttpNfcLease" [ 1514.982766] env[62820]: } to be ready. 
{{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1514.997279] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f68ac1-4e74-ba27-353f-614867c91f93, 'name': SearchDatastore_Task, 'duration_secs': 0.011193} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.999201] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1514.999201] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cad7d8-bfd6-9563-5f0a-08855c557e9f" [ 1514.999201] env[62820]: _type = "HttpNfcLease" [ 1514.999201] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1514.999392] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d9fe8c3-243e-42a5-893b-6569ee673c98 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.001551] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1515.001551] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cad7d8-bfd6-9563-5f0a-08855c557e9f" [ 1515.001551] env[62820]: _type = "HttpNfcLease" [ 1515.001551] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1515.003054] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872d6c56-b761-42ad-9dc5-79d68274a64d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.008529] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1515.008529] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ae8df5-6a53-fde9-ba4a-6a83ddcf3a67" [ 1515.008529] env[62820]: _type = "Task" [ 1515.008529] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.011632] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5267b965-3539-426f-0cf3-ad3851af1425/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1515.011800] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5267b965-3539-426f-0cf3-ad3851af1425/disk-0.vmdk for reading. 
{{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1515.074601] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ae8df5-6a53-fde9-ba4a-6a83ddcf3a67, 'name': SearchDatastore_Task, 'duration_secs': 0.011307} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.075942] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.076243] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7e4596bf-a8b0-4502-b80b-da372d1fba06/7e4596bf-a8b0-4502-b80b-da372d1fba06.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1515.076850] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34138362-6638-4643-aab7-ee09c73cd6eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.085114] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1515.085114] env[62820]: value = "task-1695647" [ 1515.085114] env[62820]: _type = "Task" [ 1515.085114] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.094051] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.111155] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b9bc31b4-8969-4e3a-be50-80804372f430 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.217335] env[62820]: INFO nova.compute.manager [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Took 44.28 seconds to build instance. 
[ 1515.350295] env[62820]: DEBUG nova.scheduler.client.report [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1515.372667] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "refresh_cache-207efed9-20ea-4b9e-bca2-45521b41de6a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.372878] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired lock "refresh_cache-207efed9-20ea-4b9e-bca2-45521b41de6a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.373119] env[62820]: DEBUG nova.network.neutron [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1515.420787] env[62820]: DEBUG nova.compute.utils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Can not refresh info_cache because instance was not found {{(pid=62820) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1515.424026] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.424425] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.425107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock 
"aa98dbb0-5ff7-4da5-a365-2b55a8bd2216-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.425107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.425274] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.428130] env[62820]: INFO nova.compute.manager [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Terminating instance [ 1515.439335] env[62820]: DEBUG nova.network.neutron [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1515.532074] env[62820]: DEBUG nova.network.neutron [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.598201] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695647, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.719933] env[62820]: DEBUG oslo_concurrency.lockutils [None req-09fe9429-c637-422a-8f75-0735f17dbec1 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.795s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.855971] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.991s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.858954] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 33.490s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.886655] env[62820]: INFO nova.scheduler.client.report [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted allocations for instance bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c [ 1515.894855] env[62820]: DEBUG nova.compute.manager [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Received event network-vif-plugged-fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1515.894855] env[62820]: DEBUG oslo_concurrency.lockutils [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] Acquiring lock "207efed9-20ea-4b9e-bca2-45521b41de6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.894855] env[62820]: DEBUG oslo_concurrency.lockutils [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.895035] env[62820]: DEBUG oslo_concurrency.lockutils [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.895367] env[62820]: DEBUG nova.compute.manager [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] No waiting events found dispatching 
network-vif-plugged-fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1515.895367] env[62820]: WARNING nova.compute.manager [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Received unexpected event network-vif-plugged-fb9a90bf-d141-401b-84c1-af8a103dc37e for instance with vm_state building and task_state spawning. [ 1515.895528] env[62820]: DEBUG nova.compute.manager [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Received event network-changed-fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1515.895686] env[62820]: DEBUG nova.compute.manager [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Refreshing instance network info cache due to event network-changed-fb9a90bf-d141-401b-84c1-af8a103dc37e. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1515.895875] env[62820]: DEBUG oslo_concurrency.lockutils [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] Acquiring lock "refresh_cache-207efed9-20ea-4b9e-bca2-45521b41de6a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.914466] env[62820]: DEBUG nova.network.neutron [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1515.933326] env[62820]: DEBUG nova.compute.manager [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1515.933696] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1515.935044] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ed8ece-86d7-4c63-9bfd-e2f22400f912 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.948756] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1515.949177] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcfcc7c4-d182-4f39-a841-ca0be9de4afd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.957507] env[62820]: DEBUG oslo_vmware.api [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1515.957507] env[62820]: value = "task-1695648" [ 1515.957507] env[62820]: _type = "Task" [ 1515.957507] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.973273] env[62820]: DEBUG oslo_vmware.api [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.034765] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Releasing lock "refresh_cache-498236b7-3688-4ab1-a604-a9737ba058e8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.035526] env[62820]: DEBUG nova.compute.manager [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1516.036077] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1516.036520] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c43586b7-138a-4661-8cfe-48fb9d14a53b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.049966] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1ba456-6cfc-4c5a-ac81-31ee4d42d2d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.090682] env[62820]: WARNING nova.virt.vmwareapi.vmops [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 498236b7-3688-4ab1-a604-a9737ba058e8 could not be found. [ 1516.090682] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1516.090914] env[62820]: INFO nova.compute.manager [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1516.091136] env[62820]: DEBUG oslo.service.loopingcall [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.095159] env[62820]: DEBUG nova.compute.manager [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1516.095159] env[62820]: DEBUG nova.network.neutron [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1516.105903] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695647, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541728} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.106290] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7e4596bf-a8b0-4502-b80b-da372d1fba06/7e4596bf-a8b0-4502-b80b-da372d1fba06.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1516.106517] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1516.106886] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44a55fdc-4897-426f-9297-289af2e4200a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.118073] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1516.118073] env[62820]: value = "task-1695649" [ 1516.118073] env[62820]: _type = "Task" [ 1516.118073] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.129624] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695649, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.134711] env[62820]: DEBUG nova.network.neutron [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1516.163630] env[62820]: DEBUG nova.network.neutron [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Updating instance_info_cache with network_info: [{"id": "fb9a90bf-d141-401b-84c1-af8a103dc37e", "address": "fa:16:3e:0f:21:7d", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb9a90bf-d1", "ovs_interfaceid": "fb9a90bf-d141-401b-84c1-af8a103dc37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.223021] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1516.363039] env[62820]: DEBUG nova.objects.instance [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lazy-loading 'migration_context' on Instance uuid ab21fd61-3a44-42fa-92be-51214b0a9a1e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1516.400141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dabd098e-b612-4d47-ba06-837fef657c80 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.634s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.470234] env[62820]: DEBUG oslo_vmware.api [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695648, 'name': PowerOffVM_Task, 'duration_secs': 0.234509} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.470556] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1516.470717] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1516.470980] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25e2f5bf-1881-4d58-a7fe-0f7b40768663 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.571854] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1516.571854] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1516.571854] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Deleting the datastore file [datastore1] aa98dbb0-5ff7-4da5-a365-2b55a8bd2216 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1516.571854] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dec70fd4-1c37-44cb-b360-d82f839f0305 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.579580] env[62820]: DEBUG oslo_vmware.api [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for the task: (returnval){ [ 1516.579580] env[62820]: value = "task-1695651" [ 1516.579580] env[62820]: _type = "Task" [ 1516.579580] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.589038] env[62820]: DEBUG oslo_vmware.api [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695651, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.631813] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695649, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14194} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.633006] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1516.633924] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ae82af-d45b-4005-9071-a49c42c3b387 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.637062] env[62820]: DEBUG nova.network.neutron [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.661733] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 7e4596bf-a8b0-4502-b80b-da372d1fba06/7e4596bf-a8b0-4502-b80b-da372d1fba06.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1516.662989] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52d919c2-a9e6-4c67-9438-52293756cc14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.679618] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Releasing lock "refresh_cache-207efed9-20ea-4b9e-bca2-45521b41de6a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.680052] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Instance network_info: |[{"id": "fb9a90bf-d141-401b-84c1-af8a103dc37e", "address": "fa:16:3e:0f:21:7d", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb9a90bf-d1", "ovs_interfaceid": "fb9a90bf-d141-401b-84c1-af8a103dc37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1516.680456] env[62820]: DEBUG oslo_concurrency.lockutils [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] Acquired lock "refresh_cache-207efed9-20ea-4b9e-bca2-45521b41de6a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.680620] env[62820]: DEBUG nova.network.neutron [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Refreshing network info cache for port fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1516.681904] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:21:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb9a90bf-d141-401b-84c1-af8a103dc37e', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1516.689653] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Creating folder: Project (573f482dc303432aba8d20980da241ef). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1516.691342] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4401fff-f3ec-4c4a-9d29-db574c729a91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.697368] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1516.697368] env[62820]: value = "task-1695652" [ 1516.697368] env[62820]: _type = "Task" [ 1516.697368] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.705983] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Created folder: Project (573f482dc303432aba8d20980da241ef) in parent group-v353379. 
[ 1516.706142] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Creating folder: Instances. Parent ref: group-v353546. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1516.709857] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea119c1a-b25a-47d1-bea4-f9ab5232acf5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.715156] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695652, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.723719] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Created folder: Instances in parent group-v353546. [ 1516.724152] env[62820]: DEBUG oslo.service.loopingcall [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.724383] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1516.724686] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80be7168-642b-44b5-8ed5-74e338280dce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.752785] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1516.752785] env[62820]: value = "task-1695655" [ 1516.752785] env[62820]: _type = "Task" [ 1516.752785] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.762949] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695655, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.774277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.827676] env[62820]: DEBUG nova.compute.manager [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Received event network-changed-b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1516.828424] env[62820]: DEBUG nova.compute.manager [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Refreshing instance network info cache due to event network-changed-b234cdf0-fffd-452d-a277-6df15c22fa06. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1516.828681] env[62820]: DEBUG oslo_concurrency.lockutils [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.828915] env[62820]: DEBUG oslo_concurrency.lockutils [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.829165] env[62820]: DEBUG nova.network.neutron [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Refreshing network info cache for port b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.090468] env[62820]: DEBUG oslo_vmware.api [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Task: {'id': task-1695651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214942} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.093461] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1517.093703] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1517.093897] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1517.094088] env[62820]: INFO nova.compute.manager [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1517.094340] env[62820]: DEBUG oslo.service.loopingcall [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.095129] env[62820]: DEBUG nova.compute.manager [-] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1517.095230] env[62820]: DEBUG nova.network.neutron [-] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1517.142478] env[62820]: INFO nova.compute.manager [-] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Took 1.05 seconds to deallocate network for instance. [ 1517.211397] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695652, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.264795] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695655, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.273239] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86058d0-3d4b-4dd0-8e86-7e79cacd50a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.282887] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2513d4b8-8e78-4b89-8b30-9a2c75e4175c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.326328] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c5ee2a-e896-41d8-9268-39b580563141 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.340040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f705430-e4b1-43a1-aa2a-3e2e2a08a379 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.355858] env[62820]: DEBUG nova.compute.provider_tree [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.649471] env[62820]: INFO nova.compute.manager [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance disappeared during terminate [ 1517.649826] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c8ccf8fc-ea42-4ab6-a1f8-bd3a89d8c31e tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "498236b7-3688-4ab1-a604-a9737ba058e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.240s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.711526] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695652, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.767092] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695655, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.864466] env[62820]: DEBUG nova.scheduler.client.report [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1517.878607] env[62820]: DEBUG nova.network.neutron [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Updated VIF entry in instance network info cache for port fb9a90bf-d141-401b-84c1-af8a103dc37e. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1517.879237] env[62820]: DEBUG nova.network.neutron [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Updating instance_info_cache with network_info: [{"id": "fb9a90bf-d141-401b-84c1-af8a103dc37e", "address": "fa:16:3e:0f:21:7d", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb9a90bf-d1", "ovs_interfaceid": "fb9a90bf-d141-401b-84c1-af8a103dc37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.933038] env[62820]: DEBUG nova.compute.manager [req-d1d8dbae-88fc-43a2-86c1-3d9d71ad5b4d req-7c9c8399-a871-4c89-888e-141e0a24943c service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Received event network-vif-deleted-927b7951-0ef5-4aa5-b888-5b73266b6951 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1517.935379] env[62820]: INFO nova.compute.manager [req-d1d8dbae-88fc-43a2-86c1-3d9d71ad5b4d req-7c9c8399-a871-4c89-888e-141e0a24943c service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Neutron deleted interface 927b7951-0ef5-4aa5-b888-5b73266b6951; detaching it from the instance and deleting it from the info cache [ 1517.935710] env[62820]: DEBUG nova.network.neutron [req-d1d8dbae-88fc-43a2-86c1-3d9d71ad5b4d req-7c9c8399-a871-4c89-888e-141e0a24943c service nova] 
[instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.021420] env[62820]: DEBUG nova.network.neutron [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updated VIF entry in instance network info cache for port b234cdf0-fffd-452d-a277-6df15c22fa06. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.021420] env[62820]: DEBUG nova.network.neutron [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.210066] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695652, 'name': ReconfigVM_Task, 'duration_secs': 1.018381} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.210393] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 7e4596bf-a8b0-4502-b80b-da372d1fba06/7e4596bf-a8b0-4502-b80b-da372d1fba06.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.211146] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76a9e0cc-4223-487c-ae9a-4e2aacb42df2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.214221] env[62820]: DEBUG nova.network.neutron [-] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.220135] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1518.220135] env[62820]: value = "task-1695656" [ 1518.220135] env[62820]: _type = "Task" [ 1518.220135] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.234719] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695656, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.265986] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695655, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.381211] env[62820]: DEBUG oslo_concurrency.lockutils [req-df72aead-5770-429c-8f86-88e542743897 req-dc3d3199-2e5c-4492-9c55-5c20b67566c4 service nova] Releasing lock "refresh_cache-207efed9-20ea-4b9e-bca2-45521b41de6a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.440058] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-024fd546-9616-4987-b106-f70a49208662 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.449991] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9453236-a18b-4556-a97c-a28b21d9d973 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.485108] env[62820]: DEBUG nova.compute.manager [req-d1d8dbae-88fc-43a2-86c1-3d9d71ad5b4d req-7c9c8399-a871-4c89-888e-141e0a24943c service nova] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Detach interface failed, port_id=927b7951-0ef5-4aa5-b888-5b73266b6951, reason: Instance aa98dbb0-5ff7-4da5-a365-2b55a8bd2216 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1518.522327] env[62820]: DEBUG oslo_concurrency.lockutils [req-9a9aeeed-6bc0-4ccc-b9fb-6c7b165ba857 req-8f97b42f-b8af-4dba-ace1-0402321fe9eb service nova] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.716679] env[62820]: INFO nova.compute.manager [-] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Took 1.62 seconds to deallocate network for instance. [ 1518.733257] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695656, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.768953] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695655, 'name': CreateVM_Task, 'duration_secs': 1.619102} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.769249] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.770083] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.770290] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.770658] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.770976] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56a8c6fc-2c71-4ac9-9285-5eee0cf155d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.777612] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1518.777612] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522506c7-f5fc-9f69-5864-a8dd7a6beb60" [ 1518.777612] env[62820]: _type = "Task" [ 1518.777612] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.787830] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522506c7-f5fc-9f69-5864-a8dd7a6beb60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.879187] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.020s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.885110] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.993s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.886547] env[62820]: INFO nova.compute.claims [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1519.227911] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.234964] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695656, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.241709] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.241937] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.289694] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522506c7-f5fc-9f69-5864-a8dd7a6beb60, 'name': SearchDatastore_Task, 'duration_secs': 0.014382} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.289959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.290322] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1519.290575] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.290790] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.291033] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1519.291314] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82c9d9d1-160b-4232-9f55-cc20b3108bad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.300417] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1519.300594] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1519.301341] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b93d42-4bc0-49ad-a75e-dbd48718b082 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.308158] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1519.308158] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5221fc77-7703-1893-3c45-09e36e337c86" [ 1519.308158] env[62820]: _type = "Task" [ 1519.308158] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.316863] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5221fc77-7703-1893-3c45-09e36e337c86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.735814] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695656, 'name': Rename_Task, 'duration_secs': 1.197557} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.736215] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1519.736605] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aaf3575c-5d56-4cba-a3aa-4aa0a9952003 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.747111] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1519.747111] env[62820]: value = "task-1695657" [ 1519.747111] env[62820]: _type = "Task" [ 1519.747111] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.756231] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695657, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.819549] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5221fc77-7703-1893-3c45-09e36e337c86, 'name': SearchDatastore_Task, 'duration_secs': 0.009647} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.820516] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a436d9bd-c11d-4bae-ae7a-56a525f04533 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.827572] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1519.827572] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5266bb15-a493-0cb7-47a9-a1c41a8d10a2" [ 1519.827572] env[62820]: _type = "Task" [ 1519.827572] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.838678] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5266bb15-a493-0cb7-47a9-a1c41a8d10a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.259115] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695657, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.260981] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea87a50-48e8-4e1c-a084-198ba8c56d86 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.268635] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ffadeb-7bea-4db8-8b90-0de6128e3ae4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.301715] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5011c4-7907-422a-a8c9-a97daf1ad5e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.310360] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca45aa6-85c3-4e69-96da-e80ca2ffade1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.324883] env[62820]: DEBUG nova.compute.provider_tree [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1520.338674] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5266bb15-a493-0cb7-47a9-a1c41a8d10a2, 'name': SearchDatastore_Task, 'duration_secs': 0.018383} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.338956] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.339319] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 207efed9-20ea-4b9e-bca2-45521b41de6a/207efed9-20ea-4b9e-bca2-45521b41de6a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1520.339611] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b2c0648-8bfd-48f0-987a-4cd5c217980d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.348207] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1520.348207] env[62820]: value = "task-1695658" [ 1520.348207] env[62820]: _type = "Task" [ 1520.348207] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.358194] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695658, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.422694] env[62820]: INFO nova.compute.manager [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Swapping old allocation on dict_keys(['8a0693d4-1456-4a04-ae15-b1eaea0edd7a']) held by migration 69cbcaf9-61c5-420e-a1c4-1817dff9efce for instance [ 1520.447935] env[62820]: DEBUG nova.scheduler.client.report [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Overwriting current allocation {'allocations': {'8a0693d4-1456-4a04-ae15-b1eaea0edd7a': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 83}}, 'project_id': '04698d19505d400594ce250863e15456', 'user_id': 'b2a98cf26a4949abadead50c7354a638', 'consumer_generation': 1} on consumer ab21fd61-3a44-42fa-92be-51214b0a9a1e {{(pid=62820) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1520.557403] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.557614] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquired lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.557883] env[62820]: DEBUG nova.network.neutron [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1520.760514] env[62820]: DEBUG oslo_vmware.api [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695657, 'name': PowerOnVM_Task, 'duration_secs': 0.668618} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.760884] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1520.761082] env[62820]: INFO nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Took 9.52 seconds to spawn the instance on the hypervisor. 
[ 1520.761285] env[62820]: DEBUG nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1520.762281] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee6cd8b-17da-413c-9231-4b0d26cb9229 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.828523] env[62820]: DEBUG nova.scheduler.client.report [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1520.860717] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472771} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.861039] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 207efed9-20ea-4b9e-bca2-45521b41de6a/207efed9-20ea-4b9e-bca2-45521b41de6a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.861279] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.861780] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-804435b1-8f60-4c9b-ab13-145b9b7edd19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.869978] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1520.869978] env[62820]: value = "task-1695659" [ 1520.869978] env[62820]: _type = "Task" [ 1520.869978] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.879351] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695659, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.300035] env[62820]: INFO nova.compute.manager [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Took 48.30 seconds to build instance. [ 1521.308556] env[62820]: DEBUG nova.network.neutron [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [{"id": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "address": "fa:16:3e:04:22:08", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af0a5c5-a1", "ovs_interfaceid": "5af0a5c5-a176-477e-b59a-fa82e9eea9a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.333468] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.334141] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1521.337078] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.738s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.338654] env[62820]: INFO nova.compute.claims [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1521.380854] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184645} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.381212] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1521.382020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758f2db9-046b-4bf5-9be3-063a96f7e817 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.406888] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 207efed9-20ea-4b9e-bca2-45521b41de6a/207efed9-20ea-4b9e-bca2-45521b41de6a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.407461] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdf535b2-65e3-46a2-92fc-1b0a1f84e8d3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.428625] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1521.428625] env[62820]: value = "task-1695660" [ 1521.428625] env[62820]: _type = "Task" [ 1521.428625] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.437990] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695660, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.807066] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce31de1a-d8a5-448f-8bd2-c2ba63cf2f6f tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.812s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.810902] env[62820]: DEBUG oslo_concurrency.lockutils [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Releasing lock "refresh_cache-ab21fd61-3a44-42fa-92be-51214b0a9a1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.811548] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1521.811757] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5526a2e-651b-4a7c-9d16-d3ad30f825ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.820644] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1521.820644] env[62820]: value = "task-1695661" [ 1521.820644] env[62820]: _type = "Task" [ 1521.820644] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.830374] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.845520] env[62820]: DEBUG nova.compute.utils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1521.847851] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1521.848811] env[62820]: DEBUG nova.network.neutron [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1521.914636] env[62820]: DEBUG nova.policy [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f3b1396bd4e4daeb1df16f05c7d92c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cba3bf0aff2d4aedbaa9fbe886f700d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1521.940651] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695660, 'name': ReconfigVM_Task, 'duration_secs': 0.333798} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.940941] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 207efed9-20ea-4b9e-bca2-45521b41de6a/207efed9-20ea-4b9e-bca2-45521b41de6a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1521.941600] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f9203b0-d4a8-4149-b3c9-c4f51f5da9d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.949401] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1521.949401] env[62820]: value = "task-1695662" [ 1521.949401] env[62820]: _type = "Task" [ 1521.949401] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.959507] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695662, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.245594] env[62820]: DEBUG nova.network.neutron [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Successfully created port: 2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1522.312093] env[62820]: DEBUG nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1522.332773] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695661, 'name': PowerOffVM_Task, 'duration_secs': 0.403454} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.332773] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1522.333166] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='72727140-45c5-4368-9f13-c12a8d0ec9dc',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2056238791',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1522.333380] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1522.333531] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None 
req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1522.335437] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1522.341199] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ba73f72-738f-47c1-8457-f7e7dc63fad7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.353658] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1522.365578] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1522.365578] env[62820]: value = "task-1695663" [ 1522.365578] env[62820]: _type = "Task" [ 1522.365578] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.377941] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695663, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.460294] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695662, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.710463] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8caab8a-3a09-4793-aab1-6d469e400ced {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.719707] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af7a5ec-56c9-446d-9cae-6a4aaeabf12e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.753249] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e780873-9b2a-43fb-a149-f7ef4117ef09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.762521] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5209b4-e4fe-4ee4-a130-223c5c7151a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.776122] env[62820]: DEBUG nova.compute.provider_tree [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.832070] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.876255] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695663, 'name': ReconfigVM_Task, 'duration_secs': 0.24453} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.877225] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c45459-51e8-47c6-a764-9caab25b47f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.898214] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='72727140-45c5-4368-9f13-c12a8d0ec9dc',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2056238791',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1522.898490] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1522.898672] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.898824] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1522.898979] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.899137] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1522.899342] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1522.899502] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1522.899664] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1522.899829] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1522.900013] env[62820]: DEBUG nova.virt.hardware [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1522.901547] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b17108d9-328b-459d-b17c-559811d22f6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.910241] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1522.910241] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521be72f-910b-f682-ad04-d28f045b00c4" [ 1522.910241] env[62820]: _type = "Task" [ 1522.910241] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.918885] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521be72f-910b-f682-ad04-d28f045b00c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.961457] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695662, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.279254] env[62820]: DEBUG nova.scheduler.client.report [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1523.366808] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1523.404761] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1523.405093] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1523.405265] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1523.405447] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1523.405591] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1523.405736] 
env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1523.405958] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1523.406128] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1523.406296] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1523.406476] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1523.406736] env[62820]: DEBUG nova.virt.hardware [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1523.407648] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413ce1e0-be6d-4d09-a021-f2139598b7b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.424110] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521be72f-910b-f682-ad04-d28f045b00c4, 'name': SearchDatastore_Task, 'duration_secs': 0.026412} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.425635] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b41c5db-2a0d-431c-842c-b0876fe0f87a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.434946] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfiguring VM instance instance-00000023 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1523.436183] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-360fb43c-f49b-4ef9-ab03-27a217d72f19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.465719] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1523.465719] env[62820]: value = "task-1695664" [ 1523.465719] env[62820]: _type = "Task" [ 1523.465719] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.473341] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695662, 'name': Rename_Task, 'duration_secs': 1.171109} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.473619] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1523.474212] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23abd871-7746-4c16-b8d8-380ac7ad10e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.479075] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695664, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.485614] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1523.485614] env[62820]: value = "task-1695665" [ 1523.485614] env[62820]: _type = "Task" [ 1523.485614] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.490833] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5267b965-3539-426f-0cf3-ad3851af1425/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1523.491659] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a13dd5-e0ad-4a0d-9a90-6ead1433b380 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.499015] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5267b965-3539-426f-0cf3-ad3851af1425/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1523.499172] env[62820]: ERROR oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5267b965-3539-426f-0cf3-ad3851af1425/disk-0.vmdk due to incomplete transfer. [ 1523.502028] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-40ce198d-8f8f-4747-8bb7-73758825a15f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.503553] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.509449] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5267b965-3539-426f-0cf3-ad3851af1425/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1523.509646] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Uploaded image fb30f87e-9d83-41a3-a17f-e897695c418d to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1523.512955] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1523.512955] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ca77b270-f9ba-4f38-963e-c701797f1a5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.520511] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1523.520511] env[62820]: value = "task-1695666" [ 1523.520511] env[62820]: _type = "Task" [ 1523.520511] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.529497] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695666, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.687137] env[62820]: DEBUG nova.compute.manager [req-7c7a3d96-0310-4c21-bdba-95d2f6a76bf6 req-22eb1567-e6a7-48ee-b7f1-8cae3efc052b service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Received event network-vif-plugged-2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1523.687137] env[62820]: DEBUG oslo_concurrency.lockutils [req-7c7a3d96-0310-4c21-bdba-95d2f6a76bf6 req-22eb1567-e6a7-48ee-b7f1-8cae3efc052b service nova] Acquiring lock "492db939-78f4-4642-89dd-a01fa94f41b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.687137] env[62820]: DEBUG oslo_concurrency.lockutils [req-7c7a3d96-0310-4c21-bdba-95d2f6a76bf6 req-22eb1567-e6a7-48ee-b7f1-8cae3efc052b service nova] Lock "492db939-78f4-4642-89dd-a01fa94f41b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.687137] env[62820]: DEBUG oslo_concurrency.lockutils [req-7c7a3d96-0310-4c21-bdba-95d2f6a76bf6 req-22eb1567-e6a7-48ee-b7f1-8cae3efc052b service nova] Lock "492db939-78f4-4642-89dd-a01fa94f41b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.687137] env[62820]: DEBUG nova.compute.manager [req-7c7a3d96-0310-4c21-bdba-95d2f6a76bf6 req-22eb1567-e6a7-48ee-b7f1-8cae3efc052b service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] No waiting events found dispatching network-vif-plugged-2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1523.687889] env[62820]: WARNING nova.compute.manager [req-7c7a3d96-0310-4c21-bdba-95d2f6a76bf6 req-22eb1567-e6a7-48ee-b7f1-8cae3efc052b service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Received unexpected event network-vif-plugged-2c04e03b-ab62-4610-b33b-f1d00be3b4be for instance with vm_state building and task_state spawning. [ 1523.776657] env[62820]: DEBUG nova.network.neutron [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Successfully updated port: 2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1523.785780] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.786198] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1523.789525] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.765s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.791054] env[62820]: INFO nova.compute.claims [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1523.926047] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "7e4596bf-a8b0-4502-b80b-da372d1fba06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.926354] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.926354] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "7e4596bf-a8b0-4502-b80b-da372d1fba06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.926500] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.926716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.928920] env[62820]: INFO nova.compute.manager [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Terminating instance [ 1523.979506] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 
tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695664, 'name': ReconfigVM_Task, 'duration_secs': 0.218876} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.979809] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfigured VM instance instance-00000023 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1523.980687] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31388d51-265b-48f6-a0c0-4745e3eb397c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.009463] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.010566] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d612d0c-fb3f-4a5d-b3ee-183a0aa84a0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.026758] env[62820]: DEBUG oslo_vmware.api [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695665, 'name': PowerOnVM_Task, 'duration_secs': 0.510565} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.032024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1524.032024] env[62820]: INFO nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Took 10.16 seconds to spawn the instance on the hypervisor. 
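The records above (task-1695664 through task-1695667) show the driver's task-polling pattern: vCenter returns a task key immediately, and the caller keeps re-reading the task until it reports success, logging intermediate progress along the way. A minimal sketch of that loop follows; get_task_info, TaskInfo, and the state strings are illustrative stand-ins, not the actual oslo.vmware API.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    # Hypothetical, simplified view of a vCenter task's state.
    state: str               # "queued", "running", "success", or "error"
    progress: int = 0        # percent complete while running
    error: str | None = None

def wait_for_vcenter_task(get_task_info, task_key,
                          poll_interval=0.5, timeout=300.0):
    # Poll until the task finishes, mirroring the
    # "Task: {...} progress is N%" / "completed successfully" records.
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_key)          # assumed callable
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"{task_key} failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"{task_key} still running after {timeout}s")
        print(f"Task {task_key} progress is {info.progress}%")
        time.sleep(poll_interval)
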
[ 1524.032024] env[62820]: DEBUG nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1524.032024] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ea38cf-84dc-44bd-901e-b015638cd5b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.036226] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1524.036226] env[62820]: value = "task-1695667" [ 1524.036226] env[62820]: _type = "Task" [ 1524.036226] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.039780] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695666, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.052281] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695667, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.279851] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.280115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.280291] env[62820]: DEBUG nova.network.neutron [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1524.295330] env[62820]: DEBUG nova.compute.utils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1524.298987] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1524.299101] env[62820]: DEBUG nova.network.neutron [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1524.346228] env[62820]: DEBUG nova.policy [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fc30b5328e44f21a88fbcaedafe5a2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e5642bbb5de4060be9d4d0ae0f8d6a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1524.435372] env[62820]: DEBUG nova.compute.manager [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1524.435372] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1524.436057] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a14384f-d156-4d89-85b8-a05986e832c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.445073] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1524.445073] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b44a3fa-4ee8-4289-98e9-5622fca21e57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.450763] env[62820]: DEBUG oslo_vmware.api [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1524.450763] env[62820]: value = "task-1695668" [ 1524.450763] env[62820]: _type = "Task" [ 1524.450763] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.459799] env[62820]: DEBUG oslo_vmware.api [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695668, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.537497] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695666, 'name': Destroy_Task, 'duration_secs': 0.921996} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.537670] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Destroyed the VM [ 1524.538053] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1524.538320] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-199ecfdd-5e7b-4967-893d-173b05e7f84d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.557295] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695667, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.557944] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1524.557944] env[62820]: value = "task-1695669" [ 1524.557944] env[62820]: _type = "Task" [ 1524.557944] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.563887] env[62820]: INFO nova.compute.manager [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Took 47.87 seconds to build instance. [ 1524.574181] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695669, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.684627] env[62820]: DEBUG nova.network.neutron [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Successfully created port: 8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1524.799899] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1524.832884] env[62820]: DEBUG nova.network.neutron [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1524.966453] env[62820]: DEBUG oslo_vmware.api [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695668, 'name': PowerOffVM_Task, 'duration_secs': 0.18704} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.966851] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1524.967025] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1524.967675] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8232539-727f-4b40-9bff-ca8225f616e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.056596] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695667, 'name': ReconfigVM_Task, 'duration_secs': 0.657986} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.056596] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Reconfigured VM instance instance-00000023 to attach disk [datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e/ab21fd61-3a44-42fa-92be-51214b0a9a1e.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.056596] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db006fe6-e6af-4729-8df7-8ccafe2b4566 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.070459] env[62820]: DEBUG oslo_concurrency.lockutils [None req-eb3d60a3-e91c-4f43-bfd5-a2b39fba2cfa tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.393s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.073420] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1525.073661] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1525.073867] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Deleting the datastore file [datastore1] 7e4596bf-a8b0-4502-b80b-da372d1fba06 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1525.079900] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38013d7c-7a8d-4962-afc1-9ca00bb5be71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.102174] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb88af9-b14b-4400-9890-27fc70b9b8a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.110545] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695669, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.110713] env[62820]: DEBUG oslo_vmware.api [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1525.110713] env[62820]: value = "task-1695671" [ 1525.110713] env[62820]: _type = "Task" [ 1525.110713] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.135084] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13839c26-b3be-48a6-a801-2e9f539c947a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.140250] env[62820]: DEBUG oslo_vmware.api [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695671, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.164117] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f64b5e-0abe-4c32-b415-6d85fc24e619 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.174418] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1525.174532] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a19d2538-e63d-4857-8219-8419396629ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.185253] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1525.185253] env[62820]: value = "task-1695672" [ 1525.185253] env[62820]: _type = "Task" [ 1525.185253] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.197189] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695672, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.302127] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b99dc1-7e79-4371-854b-69036458f2a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.316508] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6a82b7-e7fd-45fa-b6ff-a8bd8169de4d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.346807] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab48c9e3-9018-4a61-86bd-c479102b48f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.357298] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965a3cc5-9847-4eeb-94a8-8ca21f661f22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.363294] env[62820]: DEBUG nova.network.neutron [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updating instance_info_cache with network_info: [{"id": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "address": "fa:16:3e:69:86:dd", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c04e03b-ab", "ovs_interfaceid": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.374314] env[62820]: DEBUG nova.compute.provider_tree [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1525.531826] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "93e1a842-d598-4798-88ad-622ae5dbf057" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.531826] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "93e1a842-d598-4798-88ad-622ae5dbf057" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.582020] env[62820]: DEBUG oslo_vmware.api [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695669, 'name': RemoveSnapshot_Task, 'duration_secs': 0.851727} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.582020] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1525.582020] env[62820]: INFO nova.compute.manager [None req-dada341b-17fd-4b85-9a90-cc0acfdd708b tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Took 15.76 seconds to snapshot the instance on the hypervisor. [ 1525.625357] env[62820]: DEBUG oslo_vmware.api [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344125} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.625504] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1525.625782] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1525.625943] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1525.626195] env[62820]: INFO nova.compute.manager [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Took 1.19 seconds to destroy the instance on the hypervisor. 
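Tasks task-1695668 and task-1695671 above trace the vmwareapi destroy path for instance 7e4596bf-a8b0-4502-b80b-da372d1fba06: power off the VM, unregister it from the vCenter inventory, then delete its datastore directory, after which the compute manager deallocates the ports. A rough outline of that ordering, assuming hypothetical callables in place of the real vim invocations:

def destroy_instance(power_off, unregister, delete_datastore_dir, wait,
                     vm_ref, ds_path):
    # Sketch of the ordering seen above: PowerOffVM_Task, UnregisterVM,
    # then DeleteDatastoreFile_Task. Every parameter is an assumed
    # callable or handle used purely for illustration.
    wait(power_off(vm_ref))              # disks must be idle before cleanup
    unregister(vm_ref)                   # remove from inventory (no task)
    wait(delete_datastore_dir(ds_path))  # e.g. "[datastore1] <instance-uuid>"
    # Network deallocation happens afterwards in the compute manager.
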
[ 1525.626456] env[62820]: DEBUG oslo.service.loopingcall [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1525.630106] env[62820]: DEBUG nova.compute.manager [-] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1525.630106] env[62820]: DEBUG nova.network.neutron [-] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1525.695661] env[62820]: DEBUG oslo_vmware.api [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695672, 'name': PowerOnVM_Task, 'duration_secs': 0.469132} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.695997] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1525.742254] env[62820]: DEBUG nova.compute.manager [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Received event network-changed-2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1525.742785] env[62820]: DEBUG nova.compute.manager [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Refreshing instance network info cache due to event network-changed-2c04e03b-ab62-4610-b33b-f1d00be3b4be. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1525.743112] env[62820]: DEBUG oslo_concurrency.lockutils [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] Acquiring lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.811725] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1525.840173] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1525.840415] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1525.840574] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1525.840880] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1525.840958] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1525.841049] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1525.841263] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1525.841504] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1525.841591] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1525.841751] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1525.841926] env[62820]: DEBUG nova.virt.hardware [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1525.842807] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebe98fd-c6f3-47f1-a4ca-752fcec210b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.853783] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa102cc-0b2f-4706-b910-067cb9e8001c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.870036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.870385] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Instance network_info: |[{"id": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "address": "fa:16:3e:69:86:dd", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c04e03b-ab", "ovs_interfaceid": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1525.870917] env[62820]: DEBUG oslo_concurrency.lockutils [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] Acquired lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.871114] env[62820]: DEBUG nova.network.neutron [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Refreshing network info cache for port 2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1525.872439] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:86:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c04e03b-ab62-4610-b33b-f1d00be3b4be', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1525.885984] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Creating folder: Project (cba3bf0aff2d4aedbaa9fbe886f700d7). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1525.888132] env[62820]: DEBUG nova.scheduler.client.report [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1525.891724] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f10dd0e4-48a8-483c-900e-aee49f96c8d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.904856] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Created folder: Project (cba3bf0aff2d4aedbaa9fbe886f700d7) in parent group-v353379. [ 1525.905075] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Creating folder: Instances. Parent ref: group-v353549. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1525.905314] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-042712bc-0037-409d-98a5-4f0fe199fb0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.917171] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Created folder: Instances in parent group-v353549. [ 1525.917509] env[62820]: DEBUG oslo.service.loopingcall [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1525.917753] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1525.918056] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f948a47b-1442-43a5-9a5a-0e966c52d6eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.940759] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1525.940759] env[62820]: value = "task-1695675" [ 1525.940759] env[62820]: _type = "Task" [ 1525.940759] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.949196] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695675, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.033411] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1526.392659] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.393170] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1526.396107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.834s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.396348] env[62820]: DEBUG nova.objects.instance [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lazy-loading 'resources' on Instance uuid 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1526.454262] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695675, 'name': CreateVM_Task, 'duration_secs': 0.395835} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.456845] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1526.457524] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.457710] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.458094] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1526.458619] env[62820]: DEBUG nova.network.neutron [-] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.459990] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c070c677-c592-49ce-b6f6-6cd9920aa79d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.465951] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1526.465951] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521df5dd-d00d-0cb4-6ef2-0b58978ab62c" [ 1526.465951] env[62820]: _type = "Task" [ 1526.465951] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.476038] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521df5dd-d00d-0cb4-6ef2-0b58978ab62c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.554178] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.641891] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "b7806d81-eb2d-4724-8c40-ed88c8c77870" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.642184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.651862] env[62820]: DEBUG nova.network.neutron [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updated VIF entry in instance network info cache for port 2c04e03b-ab62-4610-b33b-f1d00be3b4be. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1526.652231] env[62820]: DEBUG nova.network.neutron [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updating instance_info_cache with network_info: [{"id": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "address": "fa:16:3e:69:86:dd", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c04e03b-ab", "ovs_interfaceid": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.708490] env[62820]: INFO nova.compute.manager [None req-80101b6b-aeba-4a95-a28a-7b882427a261 tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance to original state: 'active' [ 1526.902865] env[62820]: DEBUG nova.compute.utils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1526.904358] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1526.904586] env[62820]: DEBUG nova.network.neutron [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1526.963057] env[62820]: INFO nova.compute.manager [-] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Took 1.34 seconds to deallocate network for instance. [ 1526.978917] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521df5dd-d00d-0cb4-6ef2-0b58978ab62c, 'name': SearchDatastore_Task, 'duration_secs': 0.011907} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.979236] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.979463] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1526.980120] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.980293] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.980487] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.980761] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bdb1a1a-c98c-4292-8f6e-30944ec754db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.986335] env[62820]: DEBUG nova.policy [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fc30b5328e44f21a88fbcaedafe5a2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e5642bbb5de4060be9d4d0ae0f8d6a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1526.992270] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.992455] env[62820]: DEBUG nova.virt.vmwareapi.vmops 
[None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1526.993235] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-876dd513-880a-4471-979c-1c952cbf448a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.000673] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1527.000673] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52119776-ca61-2d1d-3ff4-7238ed4c50e6" [ 1527.000673] env[62820]: _type = "Task" [ 1527.000673] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.008564] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52119776-ca61-2d1d-3ff4-7238ed4c50e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.158223] env[62820]: DEBUG oslo_concurrency.lockutils [req-fb61977e-8d23-4af1-9eeb-eedc90a67ce6 req-f6b38fde-1401-4f18-957b-045c7460a6bb service nova] Releasing lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.393728] env[62820]: DEBUG nova.network.neutron [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Successfully updated port: 8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.411021] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1527.441907] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a073b5a-2ffd-4baa-8f4a-92ad3e87db28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.449803] env[62820]: DEBUG nova.network.neutron [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Successfully created port: a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1527.452482] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8051fb8-555f-4df4-b628-3aea52b2df98 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.488736] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.489073] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba040366-c16c-4e96-9d15-f4b97cc65576 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.497566] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcba2e2e-b6e7-4911-9638-807b8aca8b4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.510267] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52119776-ca61-2d1d-3ff4-7238ed4c50e6, 'name': SearchDatastore_Task, 'duration_secs': 0.025933} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.518288] env[62820]: DEBUG nova.compute.provider_tree [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.519738] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dea8a4fc-3a9d-4337-8070-bec2e2006956 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.525600] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1527.525600] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526356ad-1915-f99a-9271-26c925b9b355" [ 1527.525600] env[62820]: _type = "Task" [ 1527.525600] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.534768] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526356ad-1915-f99a-9271-26c925b9b355, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.851549] env[62820]: DEBUG nova.compute.manager [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Received event network-vif-deleted-cf61248a-4d10-4c98-9e28-b142c204a810 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1527.851757] env[62820]: DEBUG nova.compute.manager [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Received event network-vif-plugged-8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1527.851951] env[62820]: DEBUG oslo_concurrency.lockutils [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] Acquiring lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.852169] env[62820]: DEBUG oslo_concurrency.lockutils [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.852333] env[62820]: DEBUG oslo_concurrency.lockutils [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.852494] env[62820]: DEBUG nova.compute.manager [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] No waiting events found dispatching network-vif-plugged-8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1527.852655] env[62820]: WARNING nova.compute.manager [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Received unexpected event network-vif-plugged-8ba6813f-c30f-416d-b888-4a33a10698ef for instance with vm_state building and task_state spawning. 
[ 1527.852813] env[62820]: DEBUG nova.compute.manager [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Received event network-changed-8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1527.852960] env[62820]: DEBUG nova.compute.manager [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Refreshing instance network info cache due to event network-changed-8ba6813f-c30f-416d-b888-4a33a10698ef. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1527.853160] env[62820]: DEBUG oslo_concurrency.lockutils [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] Acquiring lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.853292] env[62820]: DEBUG oslo_concurrency.lockutils [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] Acquired lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.853445] env[62820]: DEBUG nova.network.neutron [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Refreshing network info cache for port 8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1527.896569] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.023160] env[62820]: DEBUG nova.scheduler.client.report [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1528.038083] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526356ad-1915-f99a-9271-26c925b9b355, 'name': SearchDatastore_Task, 'duration_secs': 0.046284} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.039124] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.039393] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 492db939-78f4-4642-89dd-a01fa94f41b5/492db939-78f4-4642-89dd-a01fa94f41b5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1528.039645] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-072c64d1-748e-4a91-a0f0-d5e9ee637891 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.047637] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1528.047637] env[62820]: value = "task-1695676" [ 1528.047637] env[62820]: _type = "Task" [ 1528.047637] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.055555] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695676, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.264367] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.264756] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.265037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.265234] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.265398] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.267557] env[62820]: INFO nova.compute.manager [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Terminating instance [ 1528.383155] env[62820]: DEBUG nova.network.neutron [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.423913] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1528.461390] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1528.461594] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1528.461753] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1528.461943] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1528.462142] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1528.462260] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1528.462461] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1528.462616] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1528.462786] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1528.462989] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1528.463183] env[62820]: DEBUG nova.virt.hardware [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1528.464048] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab5664e-1513-44da-8b99-79ca2013b8de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.468623] env[62820]: DEBUG nova.network.neutron [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.487175] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2868b4f2-e46c-43f5-ae0b-ab3efab5e8f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.533014] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.535978] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.784s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.536873] env[62820]: INFO nova.compute.claims [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1528.560941] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695676, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.560941] env[62820]: INFO nova.scheduler.client.report [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Deleted allocations for instance 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae [ 1528.771210] env[62820]: DEBUG nova.compute.manager [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1528.771474] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1528.772375] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4a64c2-3a1e-4167-890d-b6ebc429e4ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.780935] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1528.781206] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c8ef39d-f70c-401d-a380-855cb0aee336 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.788567] env[62820]: DEBUG oslo_vmware.api [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1528.788567] env[62820]: value = "task-1695677" [ 1528.788567] env[62820]: _type = "Task" [ 1528.788567] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.797046] env[62820]: DEBUG oslo_vmware.api [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695677, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.971115] env[62820]: DEBUG oslo_concurrency.lockutils [req-adca8cc9-8f11-483d-aef4-2bfac0e96508 req-258562ff-b1ba-4aeb-919e-6c2aa40386d2 service nova] Releasing lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.971530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.971698] env[62820]: DEBUG nova.network.neutron [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.059601] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.067584] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fde3a9bb-4fc6-4dc1-a190-662d1dde7dd1 tempest-ServerGroupTestJSON-1414605718 tempest-ServerGroupTestJSON-1414605718-project-member] Lock "3fc55bd7-48b9-4e02-af19-f186f5d0c9ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.997s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.149453] env[62820]: DEBUG nova.network.neutron [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Successfully updated port: a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1529.299344] env[62820]: DEBUG oslo_vmware.api [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695677, 'name': PowerOffVM_Task, 'duration_secs': 0.192016} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.299771] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1529.299909] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1529.300274] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad667fe9-44be-47fb-a626-23211377af44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.508929] env[62820]: DEBUG nova.network.neutron [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1529.561671] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.652485] env[62820]: DEBUG nova.network.neutron [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updating instance_info_cache with network_info: [{"id": "8ba6813f-c30f-416d-b888-4a33a10698ef", "address": "fa:16:3e:0d:dc:a3", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6813f-c3", "ovs_interfaceid": "8ba6813f-c30f-416d-b888-4a33a10698ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.654249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 
tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.654249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.654249] env[62820]: DEBUG nova.network.neutron [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.914547] env[62820]: DEBUG nova.compute.manager [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Received event network-vif-plugged-a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1529.914803] env[62820]: DEBUG oslo_concurrency.lockutils [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] Acquiring lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.916223] env[62820]: DEBUG oslo_concurrency.lockutils [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.916223] env[62820]: DEBUG oslo_concurrency.lockutils [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.916223] env[62820]: DEBUG nova.compute.manager [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] No waiting events found dispatching network-vif-plugged-a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1529.916223] env[62820]: WARNING nova.compute.manager [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Received unexpected event network-vif-plugged-a4c265b9-9afd-44f1-b48d-b95d490dc950 for instance with vm_state building and task_state spawning. 
[ 1529.916223] env[62820]: DEBUG nova.compute.manager [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Received event network-changed-a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1529.916223] env[62820]: DEBUG nova.compute.manager [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Refreshing instance network info cache due to event network-changed-a4c265b9-9afd-44f1-b48d-b95d490dc950. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1529.916223] env[62820]: DEBUG oslo_concurrency.lockutils [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] Acquiring lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.933265] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8401168-e973-4550-9594-e0d6eeb1669c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.942772] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1aeb5f-6573-49c4-934d-87f2073801a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.976665] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80614007-4efb-448c-b5b6-1d8fb1a00f1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.985206] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c7c430-67a0-477b-a6be-747334232a57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.999141] env[62820]: DEBUG nova.compute.provider_tree [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.015387] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1530.015734] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1530.016008] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleting the datastore file 
[datastore1] ab21fd61-3a44-42fa-92be-51214b0a9a1e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1530.019355] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe707d64-5cba-40e5-b709-40d093efa811 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.024326] env[62820]: DEBUG oslo_vmware.api [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1530.024326] env[62820]: value = "task-1695679" [ 1530.024326] env[62820]: _type = "Task" [ 1530.024326] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.035090] env[62820]: DEBUG oslo_vmware.api [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695679, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.062352] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695676, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.156889] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.157272] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Instance network_info: |[{"id": "8ba6813f-c30f-416d-b888-4a33a10698ef", "address": "fa:16:3e:0d:dc:a3", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6813f-c3", "ovs_interfaceid": "8ba6813f-c30f-416d-b888-4a33a10698ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1530.160090] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:dc:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ba6813f-c30f-416d-b888-4a33a10698ef', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1530.169497] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating folder: Project (8e5642bbb5de4060be9d4d0ae0f8d6a6). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1530.169881] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04dd06b7-d227-4478-b1bb-97ede5d8e250 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.185350] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created folder: Project (8e5642bbb5de4060be9d4d0ae0f8d6a6) in parent group-v353379. [ 1530.185580] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating folder: Instances. Parent ref: group-v353552. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1530.185878] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-853ce9de-a663-41d0-a2fc-dd1b79c2cb50 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.200664] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created folder: Instances in parent group-v353552. [ 1530.201061] env[62820]: DEBUG oslo.service.loopingcall [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.201846] env[62820]: DEBUG nova.network.neutron [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1530.203674] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1530.204679] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dc9bf41-9d4b-4f7b-8b8e-53941bc62e03 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.226248] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1530.226248] env[62820]: value = "task-1695682" [ 1530.226248] env[62820]: _type = "Task" [ 1530.226248] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.235475] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695682, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.430584] env[62820]: DEBUG nova.network.neutron [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updating instance_info_cache with network_info: [{"id": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "address": "fa:16:3e:8b:0f:9d", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c265b9-9a", "ovs_interfaceid": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.502494] env[62820]: DEBUG nova.scheduler.client.report [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1530.540020] env[62820]: DEBUG oslo_vmware.api [None 
req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695679, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.569247] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695676, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.317755} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.569247] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 492db939-78f4-4642-89dd-a01fa94f41b5/492db939-78f4-4642-89dd-a01fa94f41b5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1530.569551] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1530.569658] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb16c7d1-7348-44ee-b848-499ca7fdef14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.578105] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1530.578105] env[62820]: value = "task-1695683" [ 1530.578105] env[62820]: _type = "Task" [ 1530.578105] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.588210] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695683, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.736606] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695682, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.933921] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.933921] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Instance network_info: |[{"id": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "address": "fa:16:3e:8b:0f:9d", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c265b9-9a", "ovs_interfaceid": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1530.934210] env[62820]: DEBUG oslo_concurrency.lockutils [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] Acquired lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.934246] env[62820]: DEBUG nova.network.neutron [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Refreshing network info cache for port a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.935456] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:0f:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4c265b9-9afd-44f1-b48d-b95d490dc950', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1530.942741] env[62820]: DEBUG oslo.service.loopingcall [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 
tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.943793] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1530.944037] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4649397-ba8c-416a-83e4-730aeb70345b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.966396] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1530.966396] env[62820]: value = "task-1695684" [ 1530.966396] env[62820]: _type = "Task" [ 1530.966396] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.975991] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695684, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.008781] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.009514] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1531.012743] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.611s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.013854] env[62820]: DEBUG nova.objects.instance [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lazy-loading 'resources' on Instance uuid 56c371a9-983f-4d5f-8abf-0183736c374c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1531.039023] env[62820]: DEBUG oslo_vmware.api [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695679, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.553399} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.039023] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.039023] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.039023] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.039023] env[62820]: INFO nova.compute.manager [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1531.039023] env[62820]: DEBUG oslo.service.loopingcall [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.039023] env[62820]: DEBUG nova.compute.manager [-] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1531.039023] env[62820]: DEBUG nova.network.neutron [-] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1531.089062] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695683, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077416} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.089426] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1531.090329] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea1b455-9410-4c3e-9670-c8c6029eb39a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.117523] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 492db939-78f4-4642-89dd-a01fa94f41b5/492db939-78f4-4642-89dd-a01fa94f41b5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1531.117523] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7abdeb90-6885-45db-8344-77e72ca6914d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.139413] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1531.139413] env[62820]: value = "task-1695685" [ 1531.139413] env[62820]: _type = "Task" [ 1531.139413] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.151194] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695685, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.237164] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695682, 'name': CreateVM_Task, 'duration_secs': 0.878075} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.237349] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1531.238123] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.238258] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.238591] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1531.238848] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38546a24-8808-46d2-a669-7929b4777a7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.244389] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1531.244389] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5235a76b-3c6a-5b57-8af8-cda9ecd15da0" [ 1531.244389] env[62820]: _type = "Task" [ 1531.244389] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.252590] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5235a76b-3c6a-5b57-8af8-cda9ecd15da0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.481012] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695684, 'name': CreateVM_Task, 'duration_secs': 0.480723} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.481285] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1531.481991] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.515723] env[62820]: DEBUG nova.compute.utils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1531.523582] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1531.523770] env[62820]: DEBUG nova.network.neutron [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1531.603239] env[62820]: DEBUG nova.policy [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83ca63eee0264ec6bf703865252ad754', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29e82741460d4dfe9ec37eba75f9e95b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1531.651209] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695685, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.745162] env[62820]: DEBUG nova.network.neutron [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updated VIF entry in instance network info cache for port a4c265b9-9afd-44f1-b48d-b95d490dc950. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1531.746095] env[62820]: DEBUG nova.network.neutron [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updating instance_info_cache with network_info: [{"id": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "address": "fa:16:3e:8b:0f:9d", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c265b9-9a", "ovs_interfaceid": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.762796] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5235a76b-3c6a-5b57-8af8-cda9ecd15da0, 'name': SearchDatastore_Task, 'duration_secs': 0.020233} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.768700] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.769041] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1531.769391] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.769587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.769836] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1531.770446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.770818] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1531.771675] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec66ca7f-f70c-4920-94fa-e580bb76d8b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.774098] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e703cb5-4a20-49b3-ad87-c221714d03b9 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.784443] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1531.784443] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52097fa8-ef63-afa8-135e-cc27c23998ad" [ 1531.784443] env[62820]: _type = "Task" [ 1531.784443] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.785740] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1531.785900] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1531.789475] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad38d1b3-512c-4e5b-b2ad-22e8de863de6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.795968] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1531.795968] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f63ae1-e174-141a-ab8a-ab593fac729f" [ 1531.795968] env[62820]: _type = "Task" [ 1531.795968] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.801733] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52097fa8-ef63-afa8-135e-cc27c23998ad, 'name': SearchDatastore_Task, 'duration_secs': 0.014137} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.805143] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.805386] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1531.805594] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.812319] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f63ae1-e174-141a-ab8a-ab593fac729f, 'name': SearchDatastore_Task, 'duration_secs': 0.013662} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.813110] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b94f92b-1914-47e1-9406-17352905380c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.819453] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1531.819453] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526b4841-8ca2-80b7-6763-633bcb9a058d" [ 1531.819453] env[62820]: _type = "Task" [ 1531.819453] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.829684] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526b4841-8ca2-80b7-6763-633bcb9a058d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.864291] env[62820]: DEBUG nova.network.neutron [-] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.941014] env[62820]: DEBUG nova.compute.manager [req-92fb0f4c-8528-4198-a5cc-e4b5c0d8e7aa req-a2d7f90e-1a19-4b1d-8cff-3a06df5c50c4 service nova] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Received event network-vif-deleted-5af0a5c5-a176-477e-b59a-fa82e9eea9a7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1532.015446] env[62820]: DEBUG nova.network.neutron [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Successfully created port: b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1532.031029] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1532.082199] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1f72f5-db36-48cf-8d0d-f5029b66f57f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.090966] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50138252-a34b-4dab-8feb-2d145e1a6ceb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.122503] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddb1acf-08e6-43b9-bc76-5962bb4277eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.132023] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048c4df6-4664-4afa-b0e0-afbb5f88cf1f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.146151] env[62820]: DEBUG nova.compute.provider_tree [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.155062] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695685, 'name': ReconfigVM_Task, 'duration_secs': 0.777275} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.155213] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 492db939-78f4-4642-89dd-a01fa94f41b5/492db939-78f4-4642-89dd-a01fa94f41b5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.155756] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1afc3ac-1d62-4997-8212-20dae1351fe2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.163899] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1532.163899] env[62820]: value = "task-1695686" [ 1532.163899] env[62820]: _type = "Task" [ 1532.163899] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.173978] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695686, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.254326] env[62820]: DEBUG oslo_concurrency.lockutils [req-fef4b1f9-988f-4f0c-b9d4-4709c5fc202b req-c098d506-9a74-4273-9676-361710a34471 service nova] Releasing lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.330516] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526b4841-8ca2-80b7-6763-633bcb9a058d, 'name': SearchDatastore_Task, 'duration_secs': 0.014683} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.330804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.331088] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/eafe98b7-a67d-4bab-bfc0-8367ae069d31.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1532.331404] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.331557] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1532.331774] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb7f3d87-644c-4634-b462-2b008aa0c391 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.333684] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6418ccec-5445-47a8-ab87-166a78df93ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.343431] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1532.343431] env[62820]: value = "task-1695687" [ 1532.343431] env[62820]: _type = "Task" [ 1532.343431] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.347949] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1532.347949] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1532.348911] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b427cc-5da3-4480-8a89-27cdd992ea8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.354157] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695687, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.358153] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1532.358153] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52891f08-2491-8253-4bcf-79f3a2527c6a" [ 1532.358153] env[62820]: _type = "Task" [ 1532.358153] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.365859] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52891f08-2491-8253-4bcf-79f3a2527c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.366287] env[62820]: INFO nova.compute.manager [-] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Took 1.33 seconds to deallocate network for instance. [ 1532.651549] env[62820]: DEBUG nova.scheduler.client.report [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1532.675879] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695686, 'name': Rename_Task, 'duration_secs': 0.16457} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.676199] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1532.676465] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67677338-3acf-4d8a-9f50-3e415ee1c736 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.685925] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1532.685925] env[62820]: value = "task-1695688" [ 1532.685925] env[62820]: _type = "Task" [ 1532.685925] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.695887] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695688, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.853908] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695687, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.868153] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52891f08-2491-8253-4bcf-79f3a2527c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.011916} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.868965] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-675d18aa-a7a4-4655-bb64-c06eb5632671 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.871848] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.875374] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1532.875374] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52703ee4-c54d-c48f-2ce1-e968a014a414" [ 1532.875374] env[62820]: _type = "Task" [ 1532.875374] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.883746] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52703ee4-c54d-c48f-2ce1-e968a014a414, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.041059] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1533.065557] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1533.065804] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1533.065963] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1533.066166] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1533.066325] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1533.066453] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c 
tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1533.066657] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1533.066918] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1533.067117] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1533.067297] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1533.067471] env[62820]: DEBUG nova.virt.hardware [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1533.068342] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1ab988-5fd0-4e5d-83eb-e810096c7cf2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.076795] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1547e0-7c4c-4a5f-b60a-07aea028072b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.156948] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.159925] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.541s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.160781] env[62820]: INFO nova.compute.claims [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1533.182007] env[62820]: INFO nova.scheduler.client.report [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Deleted allocations for instance 56c371a9-983f-4d5f-8abf-0183736c374c [ 1533.197587] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695688, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.356665] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695687, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.387970] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52703ee4-c54d-c48f-2ce1-e968a014a414, 'name': SearchDatastore_Task, 'duration_secs': 0.010012} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.387970] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.388187] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/0dd0e112-7a7c-4b37-8938-bb98aab2d485.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1533.388437] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4183d9b-2994-4bff-8cfe-87a410f9686c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.396533] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1533.396533] env[62820]: value = "task-1695689" [ 1533.396533] env[62820]: _type = "Task" [ 1533.396533] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.404847] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.579848] env[62820]: DEBUG nova.network.neutron [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Successfully updated port: b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1533.693704] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba11448c-8bf0-4eec-aace-4d1c92d1a807 tempest-ImagesNegativeTestJSON-644647053 tempest-ImagesNegativeTestJSON-644647053-project-member] Lock "56c371a9-983f-4d5f-8abf-0183736c374c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.920s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.699088] env[62820]: DEBUG oslo_vmware.api [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695688, 'name': PowerOnVM_Task, 'duration_secs': 0.687156} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.699298] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.699500] env[62820]: INFO nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Took 10.33 seconds to spawn the instance on the hypervisor. [ 1533.699674] env[62820]: DEBUG nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1533.700463] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d1dee1-6558-4df2-999b-bb58d1dc28e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.855060] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695687, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.485055} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.855762] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/eafe98b7-a67d-4bab-bfc0-8367ae069d31.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1533.855762] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1533.855917] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df092647-99c6-4359-b98d-f521f22fe8b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.863881] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1533.863881] env[62820]: value = "task-1695690" [ 1533.863881] env[62820]: _type = "Task" [ 1533.863881] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.883137] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695690, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.907131] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.031875] env[62820]: DEBUG nova.compute.manager [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Received event network-vif-plugged-b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1534.032115] env[62820]: DEBUG oslo_concurrency.lockutils [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] Acquiring lock "871195a8-8b7d-433f-a0b5-c570c65faf1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.032323] env[62820]: DEBUG oslo_concurrency.lockutils [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.032576] env[62820]: DEBUG oslo_concurrency.lockutils [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.032701] env[62820]: DEBUG nova.compute.manager [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] No waiting events found dispatching network-vif-plugged-b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1534.032859] env[62820]: WARNING nova.compute.manager [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Received unexpected event network-vif-plugged-b7583a76-a88b-4483-b4ac-82ca58435896 for instance with vm_state building and task_state spawning. [ 1534.032992] env[62820]: DEBUG nova.compute.manager [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Received event network-changed-b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1534.033122] env[62820]: DEBUG nova.compute.manager [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Refreshing instance network info cache due to event network-changed-b7583a76-a88b-4483-b4ac-82ca58435896. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1534.033300] env[62820]: DEBUG oslo_concurrency.lockutils [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] Acquiring lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.033429] env[62820]: DEBUG oslo_concurrency.lockutils [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] Acquired lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.033592] env[62820]: DEBUG nova.network.neutron [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Refreshing network info cache for port b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1534.082945] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.217613] env[62820]: INFO nova.compute.manager [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Took 44.35 seconds to build instance. [ 1534.376022] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695690, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067032} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.376335] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1534.377223] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f22cf0c-4ed2-4c6a-8414-4dd733938195 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.405294] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/eafe98b7-a67d-4bab-bfc0-8367ae069d31.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1534.408460] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5033cdb5-3819-4da3-812b-0b46b05dd6dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.433059] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.936737} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.437411] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/0dd0e112-7a7c-4b37-8938-bb98aab2d485.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1534.437812] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1534.438290] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1534.438290] env[62820]: value = "task-1695691" [ 1534.438290] env[62820]: _type = "Task" [ 1534.438290] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.438767] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-663382a5-fea4-4c7c-99f4-4baaeeec7e59 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.450235] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695691, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.451728] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1534.451728] env[62820]: value = "task-1695692" [ 1534.451728] env[62820]: _type = "Task" [ 1534.451728] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.463134] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695692, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.603437] env[62820]: DEBUG nova.network.neutron [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1534.620323] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a58c311-ad3a-4f55-90aa-11b2e737162c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.630759] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2366512-21c4-4b8c-97ef-2f88f5ec4545 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.670498] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1751ff81-de03-42e7-a8ca-5922a4bb805e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.679796] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a40c082-145d-4f92-bf3a-a6ffc73db15b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.699426] env[62820]: DEBUG nova.compute.provider_tree [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.719479] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bff00c34-f542-4c86-9397-ced750c388a8 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "492db939-78f4-4642-89dd-a01fa94f41b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.867s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.727293] env[62820]: DEBUG nova.network.neutron [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.954031] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695691, 'name': ReconfigVM_Task, 'duration_secs': 0.340336} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.954031] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Reconfigured VM instance instance-00000037 to attach disk [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/eafe98b7-a67d-4bab-bfc0-8367ae069d31.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1534.954031] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-584c9262-4d29-4b70-8050-99efb63feee6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.968882] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073821} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.970596] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1534.971373] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1534.971373] env[62820]: value = "task-1695693" [ 1534.971373] env[62820]: _type = "Task" [ 1534.971373] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.972445] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58023ff6-e0e3-4f22-9a6a-1ca96b3ccbdb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.988024] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695693, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.010717] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/0dd0e112-7a7c-4b37-8938-bb98aab2d485.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1535.011561] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92d53f88-21ac-40fe-ac39-eb70ace18655 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.039793] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1535.039793] env[62820]: value = "task-1695694" [ 1535.039793] env[62820]: _type = "Task" [ 1535.039793] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.052752] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695694, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.203604] env[62820]: DEBUG nova.scheduler.client.report [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1535.225029] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1535.233609] env[62820]: DEBUG oslo_concurrency.lockutils [req-cdb3f34d-e7eb-46b6-94e4-adc2b12d5b53 req-23baf042-a020-4c93-b41d-a7d56222f75e service nova] Releasing lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.233609] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquired lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.233609] env[62820]: DEBUG nova.network.neutron [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1535.352403] env[62820]: DEBUG nova.compute.manager [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Received event network-changed-2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1535.352609] env[62820]: DEBUG nova.compute.manager [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Refreshing instance network info cache due to event network-changed-2c04e03b-ab62-4610-b33b-f1d00be3b4be. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1535.352869] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] Acquiring lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.353280] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] Acquired lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.353280] env[62820]: DEBUG nova.network.neutron [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Refreshing network info cache for port 2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1535.487705] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695693, 'name': Rename_Task, 'duration_secs': 0.17146} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.488008] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1535.488290] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1d4ba9b-4e31-47b5-997e-87051612e0d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.496604] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1535.496604] env[62820]: value = "task-1695695" [ 1535.496604] env[62820]: _type = "Task" [ 1535.496604] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.508895] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695695, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.551407] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695694, 'name': ReconfigVM_Task, 'duration_secs': 0.300826} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.551711] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/0dd0e112-7a7c-4b37-8938-bb98aab2d485.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1535.553344] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4cbbe004-e1be-469e-9523-04a5ff0ea677 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.561605] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1535.561605] env[62820]: value = "task-1695696" [ 1535.561605] env[62820]: _type = "Task" [ 1535.561605] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.570958] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695696, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.709010] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.709622] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1535.714014] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.225s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.715640] env[62820]: INFO nova.compute.claims [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1535.758374] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.812823] env[62820]: DEBUG nova.network.neutron [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1536.009645] env[62820]: DEBUG oslo_vmware.api [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695695, 'name': PowerOnVM_Task, 'duration_secs': 0.491173} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.009930] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1536.010153] env[62820]: INFO nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Took 10.20 seconds to spawn the instance on the hypervisor. [ 1536.010343] env[62820]: DEBUG nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1536.011153] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba498ce-9150-4f5a-956b-a2eaee878fad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.077569] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695696, 'name': Rename_Task, 'duration_secs': 0.159769} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.077569] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1536.077569] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-108d320d-ba36-4b17-a871-80c9adb78a68 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.085455] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1536.085455] env[62820]: value = "task-1695697" [ 1536.085455] env[62820]: _type = "Task" [ 1536.085455] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.095811] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695697, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.140628] env[62820]: DEBUG nova.network.neutron [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updating instance_info_cache with network_info: [{"id": "b7583a76-a88b-4483-b4ac-82ca58435896", "address": "fa:16:3e:1e:a0:a4", "network": {"id": "c8a0d5ff-b4cf-426e-ac21-8908c51fe8d8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1793043112-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29e82741460d4dfe9ec37eba75f9e95b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a55f45a-d631-4ebc-b73b-8a30bd0a32a8", "external-id": "nsx-vlan-transportzone-303", "segmentation_id": 303, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7583a76-a8", "ovs_interfaceid": "b7583a76-a88b-4483-b4ac-82ca58435896", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.222934] env[62820]: DEBUG nova.compute.utils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1536.224326] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1536.224506] env[62820]: DEBUG nova.network.neutron [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1536.297244] env[62820]: DEBUG nova.compute.manager [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Received event network-changed-2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1536.297446] env[62820]: DEBUG nova.compute.manager [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Refreshing instance network info cache due to event network-changed-2c04e03b-ab62-4610-b33b-f1d00be3b4be. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1536.297635] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] Acquiring lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.302897] env[62820]: DEBUG nova.policy [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4154345a1b8e48f6836812eb23d67d4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2789820b5ab4db18a288fb26a7efe46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1536.344429] env[62820]: DEBUG nova.network.neutron [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updated VIF entry in instance network info cache for port 2c04e03b-ab62-4610-b33b-f1d00be3b4be. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1536.344705] env[62820]: DEBUG nova.network.neutron [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updating instance_info_cache with network_info: [{"id": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "address": "fa:16:3e:69:86:dd", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c04e03b-ab", "ovs_interfaceid": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.537606] env[62820]: INFO nova.compute.manager [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Took 43.96 seconds to build instance. 
[ 1536.604993] env[62820]: DEBUG oslo_vmware.api [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695697, 'name': PowerOnVM_Task, 'duration_secs': 0.509321} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.605438] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1536.605748] env[62820]: INFO nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Took 8.18 seconds to spawn the instance on the hypervisor. [ 1536.606542] env[62820]: DEBUG nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1536.607883] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b54542-4608-42a6-9af2-caf7eb6c7e3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.648470] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Releasing lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.648470] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Instance network_info: |[{"id": "b7583a76-a88b-4483-b4ac-82ca58435896", "address": "fa:16:3e:1e:a0:a4", "network": {"id": "c8a0d5ff-b4cf-426e-ac21-8908c51fe8d8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1793043112-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29e82741460d4dfe9ec37eba75f9e95b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a55f45a-d631-4ebc-b73b-8a30bd0a32a8", "external-id": "nsx-vlan-transportzone-303", "segmentation_id": 303, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7583a76-a8", "ovs_interfaceid": "b7583a76-a88b-4483-b4ac-82ca58435896", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1536.648470] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:a0:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a55f45a-d631-4ebc-b73b-8a30bd0a32a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7583a76-a88b-4483-b4ac-82ca58435896', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1536.659411] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Creating folder: Project (29e82741460d4dfe9ec37eba75f9e95b). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1536.659883] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61c6b9bc-ee74-4192-b301-551f15277018 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.677601] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Created folder: Project (29e82741460d4dfe9ec37eba75f9e95b) in parent group-v353379. [ 1536.677601] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Creating folder: Instances. Parent ref: group-v353556. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1536.677601] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80c04f4c-c383-4255-8e21-134f117c4653 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.692831] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Created folder: Instances in parent group-v353556. [ 1536.693144] env[62820]: DEBUG oslo.service.loopingcall [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.693614] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1536.693856] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f384d9b9-4f48-4519-8e80-701b0b2cb9c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.716493] env[62820]: DEBUG nova.network.neutron [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Successfully created port: 0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1536.721927] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1536.721927] env[62820]: value = "task-1695700" [ 1536.721927] env[62820]: _type = "Task" [ 1536.721927] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.730226] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1536.746881] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695700, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.847388] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d9179ae-7898-4f29-9808-14ef0779a566 req-d70f0732-bc7c-4931-84e0-7f01d111652d service nova] Releasing lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.847658] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] Acquired lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.848231] env[62820]: DEBUG nova.network.neutron [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Refreshing network info cache for port 2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.040647] env[62820]: DEBUG oslo_concurrency.lockutils [None req-edd4e548-a7ca-4fe2-9a3d-fc4aa014a83b tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.879s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.110346] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e732b9-8a0e-4b27-b539-f937a59bcf5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.119000] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df701310-3fb8-4d25-b9e1-5b7c9151ce1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.164369] env[62820]: INFO nova.compute.manager [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Took 40.17 seconds to build instance. [ 1537.165909] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e87d7c-66b9-4e97-b687-13d7653b18f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.176561] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7479ea58-2a01-463f-8dc4-07bd78a2a481 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.192987] env[62820]: DEBUG nova.compute.provider_tree [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.232558] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695700, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.670992] env[62820]: DEBUG oslo_concurrency.lockutils [None req-81602e2b-848a-4e87-95be-462ee8f39fd3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.878s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.696162] env[62820]: DEBUG nova.scheduler.client.report [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1537.735897] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695700, 'name': CreateVM_Task, 'duration_secs': 0.646503} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.736179] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1537.736904] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.737100] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.737496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1537.737711] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c89f6a8-57a7-49d4-bded-bcc52100c0c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.742900] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c 
tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1537.742900] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ad3a9f-6cb6-08d9-8a2b-4215b5c3fb75" [ 1537.742900] env[62820]: _type = "Task" [ 1537.742900] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.751742] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ad3a9f-6cb6-08d9-8a2b-4215b5c3fb75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.754045] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1537.784164] env[62820]: DEBUG nova.network.neutron [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updated VIF entry in instance network info cache for port 2c04e03b-ab62-4610-b33b-f1d00be3b4be. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1537.784485] env[62820]: DEBUG nova.network.neutron [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updating instance_info_cache with network_info: [{"id": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "address": "fa:16:3e:69:86:dd", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c04e03b-ab", "ovs_interfaceid": "2c04e03b-ab62-4610-b33b-f1d00be3b4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.790235] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=<?>,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-10T16:35:11Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1537.790455] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1537.790968] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1537.790968] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1537.790968] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1537.791127] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1537.791336] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1537.791500] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1537.791930] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1537.792254] env[62820]: DEBUG nova.virt.hardware [None 
req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1537.792475] env[62820]: DEBUG nova.virt.hardware [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1537.793313] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372cc3d7-5241-467a-8fae-b2b8ac7657ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.802881] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ccec28-8514-4a1e-9cc0-243923ba67e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.201834] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.202640] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1538.209973] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.917s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.210219] env[62820]: DEBUG nova.objects.instance [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lazy-loading 'resources' on Instance uuid c06e3dcd-b997-497c-865d-5f277695cd7a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1538.259656] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ad3a9f-6cb6-08d9-8a2b-4215b5c3fb75, 'name': SearchDatastore_Task, 'duration_secs': 0.013348} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.259656] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.259656] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1538.259656] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.259656] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.259656] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1538.259656] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d8e79ad-164a-44a9-a79c-a981f463075a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.270765] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1538.270970] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1538.271777] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c00213f8-6fa4-4eda-8cc5-f24fcc346708 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.278750] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1538.278750] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c2e291-cfcd-3d5d-34be-9de3bece551a" [ 1538.278750] env[62820]: _type = "Task" [ 1538.278750] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.290543] env[62820]: DEBUG oslo_concurrency.lockutils [req-a3f3bda4-f199-4d5b-840c-c47c6ea67f22 req-4957731e-025f-4cbf-8ef7-c8171cf8f132 service nova] Releasing lock "refresh_cache-492db939-78f4-4642-89dd-a01fa94f41b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.291396] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c2e291-cfcd-3d5d-34be-9de3bece551a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.458867] env[62820]: INFO nova.compute.manager [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Rescuing [ 1538.459138] env[62820]: DEBUG oslo_concurrency.lockutils [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.459323] env[62820]: DEBUG oslo_concurrency.lockutils [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.459498] env[62820]: DEBUG nova.network.neutron [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1538.602699] env[62820]: DEBUG nova.compute.manager [req-6031e03a-c12c-4fad-8d41-9f474a82a531 req-b1c8685d-630a-441c-a422-a0098f9a68d8 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Received event network-vif-plugged-0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1538.602976] env[62820]: 
DEBUG oslo_concurrency.lockutils [req-6031e03a-c12c-4fad-8d41-9f474a82a531 req-b1c8685d-630a-441c-a422-a0098f9a68d8 service nova] Acquiring lock "7a755ef6-67bc-4242-9343-c54c8566adf8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.603358] env[62820]: DEBUG oslo_concurrency.lockutils [req-6031e03a-c12c-4fad-8d41-9f474a82a531 req-b1c8685d-630a-441c-a422-a0098f9a68d8 service nova] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.603658] env[62820]: DEBUG oslo_concurrency.lockutils [req-6031e03a-c12c-4fad-8d41-9f474a82a531 req-b1c8685d-630a-441c-a422-a0098f9a68d8 service nova] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.603863] env[62820]: DEBUG nova.compute.manager [req-6031e03a-c12c-4fad-8d41-9f474a82a531 req-b1c8685d-630a-441c-a422-a0098f9a68d8 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] No waiting events found dispatching network-vif-plugged-0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1538.604091] env[62820]: WARNING nova.compute.manager [req-6031e03a-c12c-4fad-8d41-9f474a82a531 req-b1c8685d-630a-441c-a422-a0098f9a68d8 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Received unexpected event network-vif-plugged-0c860660-820e-425d-963c-906681be61b6 for instance with vm_state building and task_state spawning. [ 1538.713220] env[62820]: DEBUG nova.compute.utils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1538.720901] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1538.720901] env[62820]: DEBUG nova.network.neutron [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1538.793148] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c2e291-cfcd-3d5d-34be-9de3bece551a, 'name': SearchDatastore_Task, 'duration_secs': 0.011962} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.793509] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8127b51d-aac9-48bd-81f7-f343feeba802 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.804131] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1538.804131] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5279a256-0a02-f5d1-db82-b9bee721c3b2" [ 1538.804131] env[62820]: _type = "Task" [ 1538.804131] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.812565] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5279a256-0a02-f5d1-db82-b9bee721c3b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.816869] env[62820]: DEBUG nova.policy [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '134e01f94e1e49cba6b909dd3e81715d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfe9869537de4334a0c8ce91fd062659', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1538.844354] env[62820]: DEBUG nova.network.neutron [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Successfully updated port: 0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1539.216478] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09477d53-586d-46df-a170-d3b9bebbcb7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.220837] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1539.231255] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e26d27b-ae15-4908-8eb3-676cbc34d082 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.271816] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e20224-2aaf-4547-baa5-928eb85ee15f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.289209] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bef2887-1acd-4f88-90f9-49f8f6ec1057 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.306328] env[62820]: DEBUG nova.compute.provider_tree [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1539.323533] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5279a256-0a02-f5d1-db82-b9bee721c3b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010061} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.323847] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.324202] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 871195a8-8b7d-433f-a0b5-c570c65faf1e/871195a8-8b7d-433f-a0b5-c570c65faf1e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1539.324777] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bb1bf73-7be0-4501-aded-f684bfc3a83d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.333535] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1539.333535] env[62820]: value = "task-1695701" [ 1539.333535] env[62820]: _type = "Task" [ 1539.333535] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.344587] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695701, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.353084] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.353084] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquired lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.353084] env[62820]: DEBUG nova.network.neutron [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1539.794729] env[62820]: DEBUG nova.network.neutron [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updating instance_info_cache with network_info: [{"id": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "address": "fa:16:3e:8b:0f:9d", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c265b9-9a", "ovs_interfaceid": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.813362] env[62820]: DEBUG nova.scheduler.client.report [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1539.851310] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695701, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.924694] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.924867] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.950166] env[62820]: DEBUG nova.network.neutron [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1539.953742] env[62820]: DEBUG nova.network.neutron [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Successfully created port: 89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1540.234214] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1540.257117] env[62820]: DEBUG nova.network.neutron [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updating instance_info_cache with network_info: [{"id": "0c860660-820e-425d-963c-906681be61b6", "address": "fa:16:3e:e8:17:73", "network": {"id": "3af0e10a-e365-401a-96cf-96144942fa3b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-82307243-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2789820b5ab4db18a288fb26a7efe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c860660-82", "ovs_interfaceid": "0c860660-820e-425d-963c-906681be61b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=<?>,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-10T16:35:11Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 
tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1540.265371] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1540.266244] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1540.266244] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1540.266244] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1540.266244] env[62820]: DEBUG nova.virt.hardware [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1540.266816] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd55900d-faed-4c08-83ed-57819fa4d21e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.277890] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27face3-78cd-4e54-8512-e27c33b8c5cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.298154] env[62820]: DEBUG oslo_concurrency.lockutils [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.321638] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.321638] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.382s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.321638] env[62820]: DEBUG nova.objects.instance [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lazy-loading 'resources' on Instance uuid ee188979-e740-4125-a17f-1c02ef9588f1 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1540.352152] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.849907} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.352152] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 871195a8-8b7d-433f-a0b5-c570c65faf1e/871195a8-8b7d-433f-a0b5-c570c65faf1e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1540.352152] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1540.352670] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-290af313-2dea-48ad-9d19-c0728e26ec0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.356575] env[62820]: INFO nova.scheduler.client.report [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Deleted allocations for instance c06e3dcd-b997-497c-865d-5f277695cd7a [ 1540.364648] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] 
Waiting for the task: (returnval){ [ 1540.364648] env[62820]: value = "task-1695702" [ 1540.364648] env[62820]: _type = "Task" [ 1540.364648] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.374282] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695702, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.427650] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1540.635684] env[62820]: DEBUG nova.compute.manager [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Received event network-changed-0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1540.635891] env[62820]: DEBUG nova.compute.manager [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Refreshing instance network info cache due to event network-changed-0c860660-820e-425d-963c-906681be61b6. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1540.636100] env[62820]: DEBUG oslo_concurrency.lockutils [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] Acquiring lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.761950] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Releasing lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.762351] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Instance network_info: |[{"id": "0c860660-820e-425d-963c-906681be61b6", "address": "fa:16:3e:e8:17:73", "network": {"id": "3af0e10a-e365-401a-96cf-96144942fa3b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-82307243-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2789820b5ab4db18a288fb26a7efe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c860660-82", "ovs_interfaceid": "0c860660-820e-425d-963c-906681be61b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1540.762676] env[62820]: DEBUG oslo_concurrency.lockutils [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] Acquired lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.762860] env[62820]: DEBUG nova.network.neutron [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Refreshing network info cache for port 0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1540.764532] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:17:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c860660-820e-425d-963c-906681be61b6', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1540.778133] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Creating folder: Project (e2789820b5ab4db18a288fb26a7efe46). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1540.781663] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00995264-9102-44f1-9529-cb13e7636cdf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.796507] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Created folder: Project (e2789820b5ab4db18a288fb26a7efe46) in parent group-v353379. [ 1540.796887] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Creating folder: Instances. Parent ref: group-v353559. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1540.797172] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2b5f479-2fb2-4986-ab26-8c2033d589fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.812214] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Created folder: Instances in parent group-v353559. [ 1540.812214] env[62820]: DEBUG oslo.service.loopingcall [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.812214] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1540.812214] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1b23822-4c02-42db-a909-572535ed6701 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.845495] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1540.845495] env[62820]: value = "task-1695705" [ 1540.845495] env[62820]: _type = "Task" [ 1540.845495] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.861076] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695705, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.867394] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbab881d-9b50-44af-8978-24e799bcff1e tempest-ServersNegativeTestMultiTenantJSON-1549763222 tempest-ServersNegativeTestMultiTenantJSON-1549763222-project-member] Lock "c06e3dcd-b997-497c-865d-5f277695cd7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 37.665s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.878844] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.296739} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.879187] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1540.880071] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11222f47-00dd-46ab-ba20-13808d4ad565 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.916667] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 871195a8-8b7d-433f-a0b5-c570c65faf1e/871195a8-8b7d-433f-a0b5-c570c65faf1e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1540.920242] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07836006-6f9e-40d5-ba43-03d31c3dfef3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.945113] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1540.945113] env[62820]: value = "task-1695706" [ 1540.945113] env[62820]: _type = "Task" [ 1540.945113] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.960941] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695706, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.962347] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.289021] env[62820]: DEBUG nova.network.neutron [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updated VIF entry in instance network info cache for port 0c860660-820e-425d-963c-906681be61b6. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1541.289157] env[62820]: DEBUG nova.network.neutron [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updating instance_info_cache with network_info: [{"id": "0c860660-820e-425d-963c-906681be61b6", "address": "fa:16:3e:e8:17:73", "network": {"id": "3af0e10a-e365-401a-96cf-96144942fa3b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-82307243-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2789820b5ab4db18a288fb26a7efe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c860660-82", "ovs_interfaceid": "0c860660-820e-425d-963c-906681be61b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.339187] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e811a775-63f9-4a0f-9467-d94103f3416d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.342499] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1541.342758] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d25d727b-c171-4bdd-9aa7-e881de85b6bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.355146] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1541.355146] env[62820]: value = "task-1695707" [ 1541.355146] env[62820]: _type = "Task" [ 1541.355146] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.356984] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caacb8f5-a380-4e04-acb5-1991867f73e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.368720] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695705, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.402616] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.404088] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ea12b8-fde5-4119-ab1a-c59a98fc16ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.422425] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc332b2-c83e-4343-81ac-a40589b68b18 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.445938] env[62820]: DEBUG nova.compute.provider_tree [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.462094] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.792795] env[62820]: DEBUG oslo_concurrency.lockutils [req-7e135ddc-ea24-4729-a395-9b970f2314c2 req-ec573d04-0e49-4b4c-aeb1-83ef2014465d service nova] Releasing lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.857687] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695705, 'name': CreateVM_Task, 'duration_secs': 0.688817} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.860856] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1541.861595] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.861803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.862210] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1541.862738] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c03bd8b-ef8a-4123-8f90-900e65717bad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.869293] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695707, 'name': PowerOffVM_Task, 'duration_secs': 0.385325} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.870583] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1541.870954] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1541.870954] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52884745-d484-6ae8-085e-71138797e23b" [ 1541.870954] env[62820]: _type = "Task" [ 1541.870954] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.871709] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51776822-dad0-4dc5-9853-f5a6efe43bad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.883234] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52884745-d484-6ae8-085e-71138797e23b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.899055] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a410c3-65d3-403d-97b8-c262a2111394 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.933030] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1541.933361] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-967d7cad-86c8-455a-8461-645b3e33da7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.947494] env[62820]: DEBUG nova.scheduler.client.report [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1541.953236] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1541.953236] env[62820]: value = "task-1695708" [ 1541.953236] env[62820]: _type = "Task" [ 1541.953236] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.965995] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695706, 'name': ReconfigVM_Task, 'duration_secs': 0.814531} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.969945] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 871195a8-8b7d-433f-a0b5-c570c65faf1e/871195a8-8b7d-433f-a0b5-c570c65faf1e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1541.971378] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5fe4a69-879c-4340-b14e-06d2bac5c672 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.973172] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1541.973330] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1541.973496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.973640] env[62820]: DEBUG oslo_concurrency.lockutils [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.973821] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1541.974319] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7329ec9f-ad70-42db-9b45-07095393a872 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.982996] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1541.982996] env[62820]: value = "task-1695709" [ 
1541.982996] env[62820]: _type = "Task" [ 1541.982996] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.986821] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1541.987069] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1541.989257] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0e6ee67-3f74-49cd-a11c-8ef460bc75e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.997191] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695709, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.998508] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1541.998508] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ffa133-7c5d-164d-88f3-843dd90727b9" [ 1541.998508] env[62820]: _type = "Task" [ 1541.998508] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.006818] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ffa133-7c5d-164d-88f3-843dd90727b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.040206] env[62820]: DEBUG nova.network.neutron [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Successfully updated port: 89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1542.385658] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52884745-d484-6ae8-085e-71138797e23b, 'name': SearchDatastore_Task, 'duration_secs': 0.033969} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.386080] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.386387] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1542.386541] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.453678] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.456637] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.849s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.456927] env[62820]: DEBUG nova.objects.instance [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'resources' on Instance uuid 09ab63ae-fd36-4915-8c59-9d9bc5833288 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1542.478633] env[62820]: INFO nova.scheduler.client.report [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Deleted allocations for instance ee188979-e740-4125-a17f-1c02ef9588f1 [ 1542.495396] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695709, 'name': Rename_Task, 'duration_secs': 0.154529} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.495527] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1542.495771] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1d7333f-65c3-4683-bea4-c282c8e885b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.504342] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1542.504342] env[62820]: value = "task-1695710" [ 1542.504342] env[62820]: _type = "Task" [ 1542.504342] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.511065] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ffa133-7c5d-164d-88f3-843dd90727b9, 'name': SearchDatastore_Task, 'duration_secs': 0.021145} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.512268] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e970bb6-9626-4e46-8109-7694865b26b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.517404] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695710, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.521074] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1542.521074] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5238372e-2b49-dc62-4ff3-7555ae50c3cf" [ 1542.521074] env[62820]: _type = "Task" [ 1542.521074] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.532650] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5238372e-2b49-dc62-4ff3-7555ae50c3cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.544095] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.544229] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.544300] env[62820]: DEBUG nova.network.neutron [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1542.627222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "15b6eda1-db87-45d1-a0c6-320386b02e12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.627446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.667934] env[62820]: DEBUG nova.compute.manager [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-vif-plugged-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1542.668213] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.668393] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.668688] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] Lock 
"a8486f52-998d-4308-813a-9c651e2eb093-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.668769] env[62820]: DEBUG nova.compute.manager [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] No waiting events found dispatching network-vif-plugged-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1542.668886] env[62820]: WARNING nova.compute.manager [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received unexpected event network-vif-plugged-89ab754d-6988-4b28-882b-5f352eda86ec for instance with vm_state building and task_state spawning. [ 1542.669074] env[62820]: DEBUG nova.compute.manager [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1542.669308] env[62820]: DEBUG nova.compute.manager [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing instance network info cache due to event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1542.669383] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.989792] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a0c6a506-285a-4901-a31c-b1bff31c0f64 tempest-InstanceActionsV221TestJSON-535344610 tempest-InstanceActionsV221TestJSON-535344610-project-member] Lock "ee188979-e740-4125-a17f-1c02ef9588f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.915s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.016187] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695710, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.031818] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5238372e-2b49-dc62-4ff3-7555ae50c3cf, 'name': SearchDatastore_Task, 'duration_secs': 0.014214} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.031818] env[62820]: DEBUG oslo_concurrency.lockutils [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.032286] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. {{(pid=62820) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1543.032431] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.032510] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1543.034013] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2a7dcda-b178-4166-8261-9986299ba9af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.035524] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeecc4fc-ef8e-469a-a40a-cd36a24b3e6d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.050987] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1543.050987] env[62820]: value = "task-1695711" [ 1543.050987] env[62820]: _type = "Task" [ 1543.050987] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.055869] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1543.056137] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1543.060885] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-911a317a-376f-43a3-9228-d59db81cd683 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.068773] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1543.068773] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520fade0-331e-8d02-50c2-504edc38380a" [ 1543.068773] env[62820]: _type = "Task" [ 1543.068773] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.071981] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.083454] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520fade0-331e-8d02-50c2-504edc38380a, 'name': SearchDatastore_Task, 'duration_secs': 0.012148} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.087804] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b55b03-2fe5-4c70-bd3f-a1ceaa01c088 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.095293] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1543.095293] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52975320-b75d-8fc3-2169-a245381ab3f9" [ 1543.095293] env[62820]: _type = "Task" [ 1543.095293] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.107871] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52975320-b75d-8fc3-2169-a245381ab3f9, 'name': SearchDatastore_Task, 'duration_secs': 0.011035} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.113311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.113642] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7a755ef6-67bc-4242-9343-c54c8566adf8/7a755ef6-67bc-4242-9343-c54c8566adf8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1543.118187] env[62820]: DEBUG nova.network.neutron [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1543.120168] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f406e477-56d1-41bf-9dab-a2f2ea4734bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.130042] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1543.134179] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1543.134179] env[62820]: value = "task-1695712" [ 1543.134179] env[62820]: _type = "Task" [ 1543.134179] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.145136] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695712, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.434021] env[62820]: DEBUG nova.network.neutron [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.468720] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89cc41f5-5693-47b7-8392-78de10bed525 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.487306] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3901f08c-57f0-4d44-bc22-c631c3c37d7d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.529077] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a6701e-1cb9-4b1f-98fb-cc18f54b6911 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.543120] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9afce24-b713-4487-8411-4b14882d17f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.547473] env[62820]: DEBUG oslo_vmware.api [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695710, 'name': PowerOnVM_Task, 'duration_secs': 0.913333} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.547767] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1543.548819] env[62820]: INFO nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Took 10.51 seconds to spawn the instance on the hypervisor. [ 1543.548931] env[62820]: DEBUG nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1543.550544] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76d6343-a16b-408b-abba-dd9e53d49211 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.564720] env[62820]: DEBUG nova.compute.provider_tree [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1543.587270] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695711, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.652265] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695712, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.659267] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.937850] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.939135] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance network_info: |[{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1543.939135] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.939135] env[62820]: DEBUG nova.network.neutron [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1543.940130] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:b6:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89ab754d-6988-4b28-882b-5f352eda86ec', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1543.948311] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating folder: Project (bfe9869537de4334a0c8ce91fd062659). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1543.952052] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73e4be42-5986-4ed9-ab71-41ac477ca8a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.965960] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created folder: Project (bfe9869537de4334a0c8ce91fd062659) in parent group-v353379. [ 1543.966185] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating folder: Instances. Parent ref: group-v353562. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1543.966441] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e77abac0-1a68-4723-872a-ef67892acfc3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.980559] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created folder: Instances in parent group-v353562. [ 1543.980827] env[62820]: DEBUG oslo.service.loopingcall [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1543.981056] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1543.981340] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-641ad659-24a8-4aca-a222-6e43c75f6831 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.002763] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.002763] env[62820]: value = "task-1695715" [ 1544.002763] env[62820]: _type = "Task" [ 1544.002763] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.015023] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695715, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.086260] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590663} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.089389] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. [ 1544.089389] env[62820]: INFO nova.compute.manager [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Took 45.36 seconds to build instance. [ 1544.094027] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83c60b8-9409-43da-beb3-c89610f2a523 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.883908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d652d864-031f-426b-8cda-54d0e6d9303c tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.176s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.891575] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1544.892701] env[62820]: ERROR nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [req-b7937acd-7183-4787-9460-d942d4e6fe46] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b7937acd-7183-4787-9460-d942d4e6fe46"}]} [ 1544.901374] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7538d31-f56a-4c12-aef6-7ffc14769863 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.925249] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695715, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.929571] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.839671} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.930903] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1544.930903] env[62820]: value = "task-1695716" [ 1544.930903] env[62820]: _type = "Task" [ 1544.930903] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.930903] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7a755ef6-67bc-4242-9343-c54c8566adf8/7a755ef6-67bc-4242-9343-c54c8566adf8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1544.930903] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1544.931295] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebd30b23-b7e6-43c0-b0ea-7e522e1c171c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.936898] env[62820]: DEBUG nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1544.948727] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695716, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.950373] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1544.950373] env[62820]: value = "task-1695717" [ 1544.950373] env[62820]: _type = "Task" [ 1544.950373] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.956525] env[62820]: DEBUG nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1544.956760] env[62820]: DEBUG nova.compute.provider_tree [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1544.963676] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695717, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.971419] env[62820]: DEBUG nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1545.008484] env[62820]: DEBUG nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1545.255392] env[62820]: DEBUG nova.network.neutron [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updated VIF entry in instance network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1545.255894] env[62820]: DEBUG nova.network.neutron [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.394317] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695715, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.441307] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695716, 'name': ReconfigVM_Task, 'duration_secs': 0.343963} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.441606] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1545.442633] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eb0db3-1b48-42ac-9f65-a6421c4ecef5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.477389] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06ea5d6d-297a-429f-836c-8b0f289c7343 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.489591] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfd5182-86bc-4dcb-b688-584af8a77418 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.501192] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695717, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135959} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.504245] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1545.504341] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1545.504341] env[62820]: value = "task-1695718" [ 1545.504341] env[62820]: _type = "Task" [ 1545.504341] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.506028] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97b025f-b690-4503-8629-f9c7f6008d20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.508332] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d685092-958b-4855-934b-4eebef7aef34 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.566225] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 7a755ef6-67bc-4242-9343-c54c8566adf8/7a755ef6-67bc-4242-9343-c54c8566adf8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1545.567179] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eb401db-13ed-4722-a480-5329e8d21aab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.582683] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c81a5e-4d5b-423a-8003-18545246011e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.593775] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7920371-9437-4a92-9581-82cc5d9d983d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.598086] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1545.598086] env[62820]: value = "task-1695719" [ 1545.598086] env[62820]: _type = "Task" [ 1545.598086] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.610317] env[62820]: DEBUG nova.compute.provider_tree [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1545.617466] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695719, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.763145] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d768edc-ebe2-44a6-9d75-c4f6bc831f3d req-a771acb8-ab07-42c7-8d13-a76a4fb3a72f service nova] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.903490] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695715, 'name': CreateVM_Task, 'duration_secs': 1.884184} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.903716] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1545.904727] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.904916] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.905329] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1545.905689] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9255fe07-8efa-4e82-a756-aec9516c83df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.911929] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1545.911929] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5264def9-aec3-aae1-145e-e0abcdbd412c" [ 1545.911929] env[62820]: _type = "Task" [ 1545.911929] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.924598] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5264def9-aec3-aae1-145e-e0abcdbd412c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.023370] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695718, 'name': ReconfigVM_Task, 'duration_secs': 0.186268} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.024104] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1546.024726] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97348d5f-4972-4777-bf49-71374cd514b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.039020] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1546.039020] env[62820]: value = "task-1695720" [ 1546.039020] env[62820]: _type = "Task" [ 1546.039020] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.051622] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695720, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.111164] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695719, 'name': ReconfigVM_Task, 'duration_secs': 0.276727} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.111530] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 7a755ef6-67bc-4242-9343-c54c8566adf8/7a755ef6-67bc-4242-9343-c54c8566adf8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1546.112221] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e536088-fbbf-4544-afd2-fdccb7ad2b01 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.123907] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1546.123907] env[62820]: value = "task-1695721" [ 1546.123907] env[62820]: _type = "Task" [ 1546.123907] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.133083] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695721, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.154276] env[62820]: DEBUG nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1546.154657] env[62820]: DEBUG nova.compute.provider_tree [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 88 to 89 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1546.155039] env[62820]: DEBUG nova.compute.provider_tree [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1546.292009] env[62820]: DEBUG nova.compute.manager [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Received event network-changed-b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1546.292228] env[62820]: DEBUG nova.compute.manager [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Refreshing instance network info cache due to event network-changed-b7583a76-a88b-4483-b4ac-82ca58435896. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1546.292439] env[62820]: DEBUG oslo_concurrency.lockutils [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] Acquiring lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.293108] env[62820]: DEBUG oslo_concurrency.lockutils [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] Acquired lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.293302] env[62820]: DEBUG nova.network.neutron [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Refreshing network info cache for port b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.427709] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5264def9-aec3-aae1-145e-e0abcdbd412c, 'name': SearchDatastore_Task, 'duration_secs': 0.010691} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.427709] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.427709] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.427709] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.430704] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.430704] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.430704] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50281320-8909-495a-8a4a-97abd0bb7fda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.442995] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.443364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.445554] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9a4c41-4523-4f50-9bc0-067ebb5ac64c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.452659] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1546.452659] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524f2926-0fac-203e-b06d-52f4ed5af101" [ 1546.452659] env[62820]: _type = "Task" [ 1546.452659] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.465308] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524f2926-0fac-203e-b06d-52f4ed5af101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.550959] env[62820]: DEBUG oslo_vmware.api [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695720, 'name': PowerOnVM_Task, 'duration_secs': 0.434159} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.551462] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1546.555335] env[62820]: DEBUG nova.compute.manager [None req-45dde690-7f16-43db-b9bc-727f2b38ead1 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1546.556186] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2e9ff6-19a0-43e4-b9a5-763bc8140170 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.634352] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695721, 'name': Rename_Task, 'duration_secs': 0.166484} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.637072] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1546.637072] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0f4b29e-bf24-4bc9-8424-31625cb020ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.642646] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1546.642646] env[62820]: value = "task-1695722" [ 1546.642646] env[62820]: _type = "Task" [ 1546.642646] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.651629] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695722, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.661031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.205s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.663769] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 37.383s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.685408] env[62820]: INFO nova.scheduler.client.report [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocations for instance 09ab63ae-fd36-4915-8c59-9d9bc5833288 [ 1546.964700] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524f2926-0fac-203e-b06d-52f4ed5af101, 'name': SearchDatastore_Task, 'duration_secs': 0.037616} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.965618] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b64d9203-c2b8-4b17-ae8c-642c0fe4c8ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.973404] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1546.973404] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526e402c-628d-caac-6e3b-8469d5f8f8d6" [ 1546.973404] env[62820]: _type = "Task" [ 1546.973404] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.983163] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526e402c-628d-caac-6e3b-8469d5f8f8d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.160758] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695722, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.193728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8777d720-3cbc-41df-964c-f29a46b956ba tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "09ab63ae-fd36-4915-8c59-9d9bc5833288" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.072s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.218034] env[62820]: DEBUG nova.network.neutron [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updated VIF entry in instance network info cache for port b7583a76-a88b-4483-b4ac-82ca58435896. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.218446] env[62820]: DEBUG nova.network.neutron [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updating instance_info_cache with network_info: [{"id": "b7583a76-a88b-4483-b4ac-82ca58435896", "address": "fa:16:3e:1e:a0:a4", "network": {"id": "c8a0d5ff-b4cf-426e-ac21-8908c51fe8d8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1793043112-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29e82741460d4dfe9ec37eba75f9e95b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a55f45a-d631-4ebc-b73b-8a30bd0a32a8", "external-id": "nsx-vlan-transportzone-303", "segmentation_id": 303, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7583a76-a8", "ovs_interfaceid": "b7583a76-a88b-4483-b4ac-82ca58435896", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.486032] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526e402c-628d-caac-6e3b-8469d5f8f8d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010668} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.486032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.486358] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1547.486510] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dadcc8bd-dd2e-4919-879b-3035ceaed403 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.495829] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1547.495829] env[62820]: value = "task-1695723" [ 1547.495829] env[62820]: _type = "Task" [ 1547.495829] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.508765] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.657135] env[62820]: DEBUG oslo_vmware.api [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1695722, 'name': PowerOnVM_Task, 'duration_secs': 0.861979} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.658095] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1547.658477] env[62820]: INFO nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Took 9.90 seconds to spawn the instance on the hypervisor. 
[ 1547.658812] env[62820]: DEBUG nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1547.659928] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8968205-9cf7-4e73-9e19-e76bf6a6c455 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.683701] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 as it has an incoming, in-progress migration 17065b4d-ea93-42e5-aca0-e553248f0e35. Migration status is error {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1547.687162] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=62820) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1547.721219] env[62820]: DEBUG oslo_concurrency.lockutils [req-a6bd4d8e-42f8-44e5-bb9b-a01cfc7afa9d req-b29e040b-c13e-464f-a2a8-90f6d105c61a service nova] Releasing lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.725146] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.725146] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.725146] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a06d736c-a704-46e8-a6f7-85d8be40804f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.725146] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 58a26c98-cbf9-491f-8d2c-20281c3d7771 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1547.725146] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance aa98dbb0-5ff7-4da5-a365-2b55a8bd2216 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1547.725146] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.725146] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b6c58867-914e-4e6e-8092-fc8991dc87f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.725840] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance ab21fd61-3a44-42fa-92be-51214b0a9a1e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1547.726246] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b7c9f518-c908-42cc-ba09-59b0f8431f68 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.726538] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9114a81d-86a9-493b-9c07-c4724a0588ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.728173] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.728173] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7e4596bf-a8b0-4502-b80b-da372d1fba06 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1547.728173] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 207efed9-20ea-4b9e-bca2-45521b41de6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.728173] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 492db939-78f4-4642-89dd-a01fa94f41b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.728610] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance eafe98b7-a67d-4bab-bfc0-8367ae069d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.728797] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0dd0e112-7a7c-4b37-8938-bb98aab2d485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.728962] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 871195a8-8b7d-433f-a0b5-c570c65faf1e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.729170] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7a755ef6-67bc-4242-9343-c54c8566adf8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.729384] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8486f52-998d-4308-813a-9c651e2eb093 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1548.006055] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695723, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469759} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.006797] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.007018] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1548.007328] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f85c6e4b-269e-4dff-89a0-a5a27bec1838 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.015051] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1548.015051] env[62820]: value = "task-1695724" [ 1548.015051] env[62820]: _type = "Task" [ 1548.015051] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.023269] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695724, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.180199] env[62820]: INFO nova.compute.manager [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Took 45.58 seconds to build instance. [ 1548.232423] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 210277a2-dd10-4e08-8627-4b025a554410 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1548.526173] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695724, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170342} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.527192] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1548.527562] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5ac281-b165-4942-9468-66e11826cf32 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.559544] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1548.559843] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98aa8575-5464-4f3d-a1a1-6ece94e11073 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.581209] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1548.581209] env[62820]: value = "task-1695725" [ 1548.581209] env[62820]: _type = "Task" [ 1548.581209] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.589924] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695725, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.681856] env[62820]: DEBUG oslo_concurrency.lockutils [None req-63d22d05-059a-4bac-9f74-6e6238ba8abe tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.180s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.735679] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a150a0d8-afcc-4a5b-a014-2c25a9bc4f07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1548.872423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "4ae63ae5-0306-4540-be88-6e7d909c38a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.873886] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "4ae63ae5-0306-4540-be88-6e7d909c38a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.093489] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695725, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.147419] env[62820]: INFO nova.compute.manager [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Rescuing [ 1549.147807] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.148096] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.148332] env[62820]: DEBUG nova.network.neutron [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1549.241069] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 11843b38-3ce4-42a7-b855-a9d0b473e796 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1549.370075] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "2587a273-0115-483a-ba5e-994c87bbc4d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.370445] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "2587a273-0115-483a-ba5e-994c87bbc4d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.378226] env[62820]: DEBUG nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1549.423789] env[62820]: DEBUG nova.compute.manager [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Received event network-changed-b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1549.423789] env[62820]: DEBUG nova.compute.manager [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Refreshing instance network info cache due to event network-changed-b7583a76-a88b-4483-b4ac-82ca58435896. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1549.423789] env[62820]: DEBUG oslo_concurrency.lockutils [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] Acquiring lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.423789] env[62820]: DEBUG oslo_concurrency.lockutils [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] Acquired lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.423985] env[62820]: DEBUG nova.network.neutron [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Refreshing network info cache for port b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.596026] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695725, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.745553] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 93e1a842-d598-4798-88ad-622ae5dbf057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1549.873416] env[62820]: DEBUG nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1549.903959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.012190] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.012469] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.095200] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695725, 'name': ReconfigVM_Task, 'duration_secs': 1.264204} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.095500] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfigured VM instance instance-0000003b to attach disk [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1550.096121] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac82b820-6837-401b-a330-b1486887448d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.104911] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1550.104911] env[62820]: value = "task-1695726" [ 1550.104911] env[62820]: _type = "Task" [ 1550.104911] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.116786] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695726, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.251029] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b7806d81-eb2d-4724-8c40-ed88c8c77870 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1550.399522] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.458195] env[62820]: DEBUG nova.network.neutron [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updating instance_info_cache with network_info: [{"id": "8ba6813f-c30f-416d-b888-4a33a10698ef", "address": "fa:16:3e:0d:dc:a3", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6813f-c3", "ovs_interfaceid": "8ba6813f-c30f-416d-b888-4a33a10698ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.601411] env[62820]: DEBUG nova.network.neutron [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updated VIF entry in instance network info cache for port b7583a76-a88b-4483-b4ac-82ca58435896. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1550.601563] env[62820]: DEBUG nova.network.neutron [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updating instance_info_cache with network_info: [{"id": "b7583a76-a88b-4483-b4ac-82ca58435896", "address": "fa:16:3e:1e:a0:a4", "network": {"id": "c8a0d5ff-b4cf-426e-ac21-8908c51fe8d8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1793043112-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29e82741460d4dfe9ec37eba75f9e95b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a55f45a-d631-4ebc-b73b-8a30bd0a32a8", "external-id": "nsx-vlan-transportzone-303", "segmentation_id": 303, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7583a76-a8", "ovs_interfaceid": "b7583a76-a88b-4483-b4ac-82ca58435896", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.619740] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695726, 'name': Rename_Task, 'duration_secs': 0.181476} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.620419] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.620768] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b64d721-5991-4f6e-9995-8e9e2f77b516 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.629474] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1550.629474] env[62820]: value = "task-1695727" [ 1550.629474] env[62820]: _type = "Task" [ 1550.629474] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.640687] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695727, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.658342] env[62820]: DEBUG nova.compute.manager [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Received event network-changed-0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1550.658607] env[62820]: DEBUG nova.compute.manager [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Refreshing instance network info cache due to event network-changed-0c860660-820e-425d-963c-906681be61b6. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1550.661471] env[62820]: DEBUG oslo_concurrency.lockutils [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] Acquiring lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.661471] env[62820]: DEBUG oslo_concurrency.lockutils [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] Acquired lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.661471] env[62820]: DEBUG nova.network.neutron [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Refreshing network info cache for port 0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1550.757014] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3a325dbf-87fb-4f7e-a665-e5d181333a5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1550.812368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "871195a8-8b7d-433f-a0b5-c570c65faf1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.812565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.812773] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "871195a8-8b7d-433f-a0b5-c570c65faf1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.812957] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.813187] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.815651] env[62820]: INFO nova.compute.manager [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Terminating instance [ 1550.960814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.104380] env[62820]: DEBUG oslo_concurrency.lockutils [req-cf7f0335-d74e-437a-a95c-a5e8b14f4c70 req-ff4e1ef6-c511-44f8-85bf-af0dcd4fd5be service nova] Releasing lock "refresh_cache-871195a8-8b7d-433f-a0b5-c570c65faf1e" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.140584] env[62820]: DEBUG oslo_vmware.api [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1695727, 'name': PowerOnVM_Task, 'duration_secs': 0.491322} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.140779] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1551.140982] env[62820]: INFO nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Took 10.91 seconds to spawn the instance on the hypervisor. [ 1551.141185] env[62820]: DEBUG nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1551.141976] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ee77ee-6be2-42be-ac4d-cfa885b6ab52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.261056] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1551.261697] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1551.261697] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1551.326957] env[62820]: DEBUG nova.compute.manager [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1551.327214] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1551.330454] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f18bc81-b1b5-4540-a495-719fbdc85f9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.341025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1551.341291] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-806270c8-53d8-4c65-8c7d-98341112fbec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.350385] env[62820]: DEBUG oslo_vmware.api [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1551.350385] env[62820]: value = "task-1695728" [ 1551.350385] env[62820]: _type = "Task" [ 1551.350385] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.360401] env[62820]: DEBUG oslo_vmware.api [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.443979] env[62820]: DEBUG nova.network.neutron [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updated VIF entry in instance network info cache for port 0c860660-820e-425d-963c-906681be61b6. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1551.444457] env[62820]: DEBUG nova.network.neutron [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updating instance_info_cache with network_info: [{"id": "0c860660-820e-425d-963c-906681be61b6", "address": "fa:16:3e:e8:17:73", "network": {"id": "3af0e10a-e365-401a-96cf-96144942fa3b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-82307243-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2789820b5ab4db18a288fb26a7efe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c860660-82", "ovs_interfaceid": "0c860660-820e-425d-963c-906681be61b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.660864] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9918a98f-860e-4d95-b6e4-c9014a35694b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.669325] env[62820]: INFO nova.compute.manager [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Took 48.20 seconds to build instance. 
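Annotation: the recurring "Waiting for the task: (returnval){ value = task-NNNNNNN ... } to complete", the "progress is N%" lines, and the final "completed successfully ... duration_secs" entries above all come from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The sketch below is a minimal, hedged illustration of that pattern, not Nova's actual code path: the VMwareAPISession constructor arguments, the vCenter host, credentials, and the VM managed-object reference are all placeholder assumptions; only the invoke_api()/wait_for_task() usage mirrors what the log shows.

```python
# Minimal sketch of the task-polling pattern seen in the log above (assumptions noted).
from oslo_vmware import api, vim_util

# Placeholder connection details; parameter names follow oslo.vmware's VMwareAPISession
# as referenced in the log (api.py), but host/credentials here are illustrative only.
session = api.VMwareAPISession(
    'vc.example.test',          # vCenter host (placeholder)
    'svc-user', 'secret',       # credentials (placeholders)
    api_retry_count=10,
    task_poll_interval=0.5,
)

# Assumed managed-object reference for a VM; in Nova this comes from a
# PropertyCollector/SearchIndex lookup, not a hard-coded value.
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# A *_Task method returns immediately with a Task moref; wait_for_task() then polls it
# (producing the "progress is N%" DEBUG lines) and raises if the task ends in error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # expected 'success' once the task completes
```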
[ 1551.674297] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67be7de-2e1b-4c3b-b5ef-f337a19c7b95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.710019] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e66d9b5-474e-4453-a766-906163feec41 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.717030] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66885a4c-5b2d-4c91-b27c-ce6fbb7fa152 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.731551] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1551.860441] env[62820]: DEBUG oslo_vmware.api [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695728, 'name': PowerOffVM_Task, 'duration_secs': 0.20287} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.860792] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1551.860998] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1551.861254] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28994bdd-78fc-46a5-b847-7dc8c0e7c622 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.942644] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1551.942870] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1551.943055] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b 
tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Deleting the datastore file [datastore1] 871195a8-8b7d-433f-a0b5-c570c65faf1e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1551.943338] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67dce6c7-47ce-4cec-9171-7378f79cc356 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.947337] env[62820]: DEBUG oslo_concurrency.lockutils [req-c32f6720-0471-4ef4-a099-e2ac21fc5439 req-e4585383-b802-41a2-a505-f5bc21184acd service nova] Releasing lock "refresh_cache-7a755ef6-67bc-4242-9343-c54c8566adf8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.950867] env[62820]: DEBUG oslo_vmware.api [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for the task: (returnval){ [ 1551.950867] env[62820]: value = "task-1695730" [ 1551.950867] env[62820]: _type = "Task" [ 1551.950867] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.959486] env[62820]: DEBUG oslo_vmware.api [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695730, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.001015] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1552.001340] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e5c8351-67d4-4904-af0d-c90a43401493 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.011015] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1552.011015] env[62820]: value = "task-1695731" [ 1552.011015] env[62820]: _type = "Task" [ 1552.011015] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.018840] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.174595] env[62820]: DEBUG oslo_concurrency.lockutils [None req-29c8151c-aa07-476b-a803-e6d74f78460e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.985s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.234380] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1552.463169] env[62820]: DEBUG oslo_vmware.api [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Task: {'id': task-1695730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238989} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.463487] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1552.463731] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1552.463915] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1552.464137] env[62820]: INFO nova.compute.manager [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1552.464390] env[62820]: DEBUG oslo.service.loopingcall [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.464597] env[62820]: DEBUG nova.compute.manager [-] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1552.464736] env[62820]: DEBUG nova.network.neutron [-] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1552.520840] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695731, 'name': PowerOffVM_Task, 'duration_secs': 0.252663} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.521140] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1552.521957] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5816024e-76da-433f-8fc4-c16171d93403 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.542015] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a79c77-18ce-44ef-a681-a18ec0fc3603 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.583384] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1552.583705] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dd2a291-4d9a-4c11-86b8-7debd44ca4a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.591973] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1552.591973] env[62820]: value = "task-1695732" [ 1552.591973] env[62820]: _type = "Task" [ 1552.591973] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.605300] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1552.605618] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1552.605862] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.606552] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.606552] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1552.606552] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5acc360c-a00a-4ce3-bd46-94be2a566da6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.639144] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1552.639144] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1552.639607] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4897e65-4a07-48e0-be6e-5621b782b47d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.648739] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1552.648739] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529058e0-3abd-104e-c8af-6e4bb437e7f9" [ 1552.648739] env[62820]: _type = "Task" [ 1552.648739] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.657566] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529058e0-3abd-104e-c8af-6e4bb437e7f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.677430] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1552.739678] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1552.739940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.077s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.740249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.459s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.746044] env[62820]: INFO nova.compute.claims [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1552.749025] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.750048] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted 
instances {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11586}} [ 1552.825394] env[62820]: DEBUG nova.compute.manager [req-0453f0ca-d37a-4d81-ac9a-719147a29ef3 req-edd61a9a-65b9-4e74-b3d2-0823123e1381 service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Received event network-vif-deleted-b7583a76-a88b-4483-b4ac-82ca58435896 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1552.825625] env[62820]: INFO nova.compute.manager [req-0453f0ca-d37a-4d81-ac9a-719147a29ef3 req-edd61a9a-65b9-4e74-b3d2-0823123e1381 service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Neutron deleted interface b7583a76-a88b-4483-b4ac-82ca58435896; detaching it from the instance and deleting it from the info cache [ 1552.825865] env[62820]: DEBUG nova.network.neutron [req-0453f0ca-d37a-4d81-ac9a-719147a29ef3 req-edd61a9a-65b9-4e74-b3d2-0823123e1381 service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.040459] env[62820]: DEBUG nova.compute.manager [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1553.040459] env[62820]: DEBUG nova.compute.manager [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing instance network info cache due to event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1553.040459] env[62820]: DEBUG oslo_concurrency.lockutils [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.040459] env[62820]: DEBUG oslo_concurrency.lockutils [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.040459] env[62820]: DEBUG nova.network.neutron [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.160034] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529058e0-3abd-104e-c8af-6e4bb437e7f9, 'name': SearchDatastore_Task, 'duration_secs': 0.015143} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.160777] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9d5ebad-d340-4e01-b1bc-5e26739aa4b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.166575] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1553.166575] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5226e67c-9080-3f91-cfa8-4539d346cb6a" [ 1553.166575] env[62820]: _type = "Task" [ 1553.166575] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.175539] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5226e67c-9080-3f91-cfa8-4539d346cb6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.203568] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.205974] env[62820]: DEBUG nova.network.neutron [-] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.266381] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] There are 40 instances to clean {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11595}} [ 1553.266381] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: ee188979-e740-4125-a17f-1c02ef9588f1] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1553.328815] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-414c3be9-3574-4065-9558-40581a180763 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.339856] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2a300d-12ad-4214-97e0-ece5bd320a84 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.374018] env[62820]: DEBUG nova.compute.manager [req-0453f0ca-d37a-4d81-ac9a-719147a29ef3 req-edd61a9a-65b9-4e74-b3d2-0823123e1381 service nova] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Detach interface failed, port_id=b7583a76-a88b-4483-b4ac-82ca58435896, reason: Instance 871195a8-8b7d-433f-a0b5-c570c65faf1e could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1553.679464] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5226e67c-9080-3f91-cfa8-4539d346cb6a, 'name': SearchDatastore_Task, 'duration_secs': 0.012599} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.679841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.680048] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. {{(pid=62820) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1553.680318] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52aad249-1ec0-4258-999b-f7235bc47f64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.689289] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1553.689289] env[62820]: value = "task-1695733" [ 1553.689289] env[62820]: _type = "Task" [ 1553.689289] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.698061] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695733, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.711913] env[62820]: INFO nova.compute.manager [-] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Took 1.25 seconds to deallocate network for instance. [ 1553.771730] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: c06e3dcd-b997-497c-865d-5f277695cd7a] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1553.794330] env[62820]: DEBUG nova.network.neutron [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updated VIF entry in instance network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1553.794709] env[62820]: DEBUG nova.network.neutron [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.195300] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30193a53-b9d6-4e1e-b601-17a294c6b19c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.205964] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da980179-75f2-4c20-b08f-52cb796f5402 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.209055] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695733, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489572} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.209318] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
[ 1554.210437] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5b5450-3f39-408f-90f6-23b405517331 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.238806] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.252485] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06752301-1ab2-4d69-b917-8952f60c0a8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.261995] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.262294] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69363c92-38cb-48b5-906b-dabf13f0b8df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.279019] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 56c371a9-983f-4d5f-8abf-0183736c374c] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1554.282415] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c798ca-17d1-46ab-b7f4-2413c83288ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.287514] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1554.287514] env[62820]: value = "task-1695734" [ 1554.287514] env[62820]: _type = "Task" [ 1554.287514] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.301865] env[62820]: DEBUG nova.compute.provider_tree [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.301865] env[62820]: DEBUG oslo_concurrency.lockutils [req-e2523eeb-d887-440b-ad62-a497dab7c3b2 req-ae3c1ca9-6423-4402-aed6-5d3ae55412bd service nova] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.306699] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.782287] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 3fc55bd7-48b9-4e02-af19-f186f5d0c9ae] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1554.798228] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.803277] env[62820]: DEBUG nova.scheduler.client.report [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1555.286328] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 09ab63ae-fd36-4915-8c59-9d9bc5833288] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1555.298740] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695734, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.309157] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.569s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.309664] env[62820]: DEBUG nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1555.312379] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.169s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.312570] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.314537] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.543s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.315913] env[62820]: INFO nova.compute.claims [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1555.336959] env[62820]: INFO nova.scheduler.client.report [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted allocations for instance 58a26c98-cbf9-491f-8d2c-20281c3d7771 [ 1555.789951] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 498236b7-3688-4ab1-a604-a9737ba058e8] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1555.801154] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695734, 'name': ReconfigVM_Task, 'duration_secs': 1.227704} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.802057] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Reconfigured VM instance instance-00000037 to attach disk [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.802537] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0205a07-95ca-4a6c-ae56-2c5d5effae19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.825484] env[62820]: DEBUG nova.compute.utils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1555.832027] env[62820]: DEBUG nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1555.832214] env[62820]: DEBUG nova.network.neutron [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1555.833962] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50ffeaf8-d3e3-47f6-b932-919a7e485e0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.848112] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fe7af29-3b90-435a-8ff5-e13866f03034 tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "58a26c98-cbf9-491f-8d2c-20281c3d7771" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.292s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.855864] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1555.855864] env[62820]: value = "task-1695735" [ 1555.855864] env[62820]: _type = "Task" [ 1555.855864] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.868601] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695735, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.903259] env[62820]: DEBUG nova.policy [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd41e844bb294c6ab6e3869af994f60a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fef128f5c704730b335b62f6cce0416', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1556.291090] env[62820]: DEBUG nova.network.neutron [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Successfully created port: 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1556.296006] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 1926c780-faea-40d8-a00b-6ad576349a68] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1556.335694] env[62820]: DEBUG nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1556.369765] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695735, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.537488] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.537728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.537940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.538137] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.538309] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.540283] env[62820]: INFO nova.compute.manager [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Terminating instance [ 1556.692982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a80c2b3-6e89-4b43-9c82-452ccb3d4c8d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.700794] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9895a4e3-df85-4743-8051-e29e60e62791 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.730728] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7ea1a9-6064-48cf-a844-bc97cd2fbe80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.738349] env[62820]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cead902-3cc9-469f-acd9-190f1d944994 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.751859] env[62820]: DEBUG nova.compute.provider_tree [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.799589] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: e45cdcfb-f2ce-4798-8e97-1c3f95e61db3] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1556.866735] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695735, 'name': ReconfigVM_Task, 'duration_secs': 0.969618} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.867203] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1556.867523] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40b5da42-5e99-49fa-811a-49b6221bf779 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.876680] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1556.876680] env[62820]: value = "task-1695736" [ 1556.876680] env[62820]: _type = "Task" [ 1556.876680] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.889307] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695736, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.043725] env[62820]: DEBUG nova.compute.manager [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1557.043970] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1557.044891] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6568ceb6-5f0f-412a-bdde-62b109b91576 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.053091] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1557.053292] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11332c71-1ae0-4e23-ab27-a6de463b0d6c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.061987] env[62820]: DEBUG oslo_vmware.api [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1557.061987] env[62820]: value = "task-1695737" [ 1557.061987] env[62820]: _type = "Task" [ 1557.061987] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.070680] env[62820]: DEBUG oslo_vmware.api [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.255424] env[62820]: DEBUG nova.scheduler.client.report [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1557.302817] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: f186854d-3f0a-4512-83b9-2c946247ccbe] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1557.345625] env[62820]: DEBUG nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1557.373735] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1557.374061] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1557.374227] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1557.374412] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1557.374579] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1557.374826] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1557.374975] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1557.375148] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1557.375313] env[62820]: DEBUG 
nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1557.375467] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1557.375684] env[62820]: DEBUG nova.virt.hardware [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1557.376520] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a404293b-ef10-455c-b32e-58c2a14c9fa5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.391033] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1937a286-5b12-45de-8cca-475494f4bcb0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.394793] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695736, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.572105] env[62820]: DEBUG oslo_vmware.api [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695737, 'name': PowerOffVM_Task, 'duration_secs': 0.220042} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.572397] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1557.572567] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1557.572821] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d290c93d-c288-4402-970a-78813f9e2ee0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.669556] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1557.669776] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1557.669959] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleting the datastore file [datastore1] 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1557.670302] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d6d37d8-176e-4711-ab51-333a337e0f81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.677429] env[62820]: DEBUG oslo_vmware.api [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1557.677429] env[62820]: value = "task-1695739" [ 1557.677429] env[62820]: _type = "Task" [ 1557.677429] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.685608] env[62820]: DEBUG oslo_vmware.api [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695739, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.760862] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.761427] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1557.764081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.536s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.764279] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.766449] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.935s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.768385] env[62820]: INFO nova.compute.claims [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1557.791366] env[62820]: DEBUG nova.compute.manager [req-928f948f-3102-4cfc-bd46-b04c2e6b2860 req-cb64b4b0-0c2a-4684-bc83-ab5932b66bbf service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Received event network-vif-plugged-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1557.791366] env[62820]: DEBUG oslo_concurrency.lockutils [req-928f948f-3102-4cfc-bd46-b04c2e6b2860 req-cb64b4b0-0c2a-4684-bc83-ab5932b66bbf service nova] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.791366] env[62820]: DEBUG oslo_concurrency.lockutils [req-928f948f-3102-4cfc-bd46-b04c2e6b2860 req-cb64b4b0-0c2a-4684-bc83-ab5932b66bbf service nova] Lock "210277a2-dd10-4e08-8627-4b025a554410-events" 
acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.791366] env[62820]: DEBUG oslo_concurrency.lockutils [req-928f948f-3102-4cfc-bd46-b04c2e6b2860 req-cb64b4b0-0c2a-4684-bc83-ab5932b66bbf service nova] Lock "210277a2-dd10-4e08-8627-4b025a554410-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.791366] env[62820]: DEBUG nova.compute.manager [req-928f948f-3102-4cfc-bd46-b04c2e6b2860 req-cb64b4b0-0c2a-4684-bc83-ab5932b66bbf service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] No waiting events found dispatching network-vif-plugged-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1557.791366] env[62820]: WARNING nova.compute.manager [req-928f948f-3102-4cfc-bd46-b04c2e6b2860 req-cb64b4b0-0c2a-4684-bc83-ab5932b66bbf service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Received unexpected event network-vif-plugged-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 for instance with vm_state building and task_state spawning. [ 1557.798374] env[62820]: INFO nova.scheduler.client.report [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Deleted allocations for instance aa98dbb0-5ff7-4da5-a365-2b55a8bd2216 [ 1557.806104] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 706d42cd-53d9-4976-bc67-98816a40fff4] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1557.888728] env[62820]: DEBUG oslo_vmware.api [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695736, 'name': PowerOnVM_Task, 'duration_secs': 0.566189} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.889009] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1557.891132] env[62820]: DEBUG nova.network.neutron [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Successfully updated port: 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1557.892797] env[62820]: DEBUG nova.compute.manager [None req-ca92d460-ed9e-4234-8308-1830097dc035 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1557.893583] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92797b82-4360-4b1b-893e-054a7af9a678 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.188197] env[62820]: DEBUG oslo_vmware.api [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160927} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.188483] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1558.188665] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1558.188839] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1558.189023] env[62820]: INFO nova.compute.manager [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1558.189297] env[62820]: DEBUG oslo.service.loopingcall [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.189507] env[62820]: DEBUG nova.compute.manager [-] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1558.189603] env[62820]: DEBUG nova.network.neutron [-] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1558.272957] env[62820]: DEBUG nova.compute.utils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1558.276747] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1558.276977] env[62820]: DEBUG nova.network.neutron [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1558.305446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f713643c-a8c9-4b71-b896-4d133b54a6d8 tempest-FloatingIPsAssociationTestJSON-650434003 tempest-FloatingIPsAssociationTestJSON-650434003-project-member] Lock "aa98dbb0-5ff7-4da5-a365-2b55a8bd2216" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.881s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.309007] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 262d0714-d7d7-443c-9927-ef03ba9f230e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1558.362147] env[62820]: DEBUG nova.policy [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7a0e8f276074325b78193cb7a2a3a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1730db17199844cd8833f1176d249b0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1558.398475] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.398737] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.402147] env[62820]: DEBUG nova.network.neutron [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1558.778031] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1558.811817] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 31639194-b0c4-4eb9-a6f4-e61b067c807f] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1558.878341] env[62820]: DEBUG nova.network.neutron [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Successfully created port: 09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1558.971941] env[62820]: DEBUG nova.network.neutron [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1559.152736] env[62820]: DEBUG nova.network.neutron [-] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.210591] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691ff531-3511-4383-9527-32e6c499a872 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.219552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9f1daa-fc24-4c47-be13-5aaecf40344e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.259593] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d92ee4f-b276-4bc3-a232-8e15fa6cf68b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.263911] env[62820]: DEBUG nova.network.neutron [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.271724] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1592aeec-3847-4129-a681-6ca41c5ccb27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.290502] env[62820]: DEBUG nova.compute.provider_tree [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.316533] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 
06fb6034-e010-49bd-9e5e-7699a43dd5a9] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1559.379713] env[62820]: INFO nova.compute.manager [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Unrescuing [ 1559.379713] env[62820]: DEBUG oslo_concurrency.lockutils [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.379713] env[62820]: DEBUG oslo_concurrency.lockutils [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.379713] env[62820]: DEBUG nova.network.neutron [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.657023] env[62820]: INFO nova.compute.manager [-] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Took 1.47 seconds to deallocate network for instance. [ 1559.766228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.766799] env[62820]: DEBUG nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Instance network_info: |[{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1559.767407] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:03:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1559.778315] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating folder: Project (7fef128f5c704730b335b62f6cce0416). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1559.778666] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-475b8dec-bafa-4762-87e4-1a7009ebd2af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.792944] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created folder: Project (7fef128f5c704730b335b62f6cce0416) in parent group-v353379. [ 1559.793162] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating folder: Instances. Parent ref: group-v353565. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1559.793457] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3df510a5-16f9-458d-bde9-eb99c8a27692 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.797044] env[62820]: DEBUG nova.scheduler.client.report [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1559.805020] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1559.815521] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created folder: Instances in parent group-v353565. [ 1559.817385] env[62820]: DEBUG oslo.service.loopingcall [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1559.817731] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1559.821039] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c69483bb-1b72-4307-86b4-263ff2ec9006 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.838407] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: bb13cfe6-3ccf-4e5b-bdf2-2c0e8c350f4c] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1559.848520] env[62820]: DEBUG nova.compute.manager [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Received event network-changed-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1559.848520] env[62820]: DEBUG nova.compute.manager [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Refreshing instance network info cache due to event network-changed-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1559.848520] env[62820]: DEBUG oslo_concurrency.lockutils [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.848520] env[62820]: DEBUG oslo_concurrency.lockutils [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.848520] env[62820]: DEBUG nova.network.neutron [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Refreshing network info cache for port 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1559.851592] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1559.852093] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1559.852093] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1559.852093] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1559.852573] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1559.852573] env[62820]: DEBUG nova.virt.hardware [None 
req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1559.852573] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1559.852720] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1559.852878] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1559.853137] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1559.853240] env[62820]: DEBUG nova.virt.hardware [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1559.854587] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c4aaa9-a456-4971-a60a-a2b35323e9ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.861596] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1559.861596] env[62820]: value = "task-1695742" [ 1559.861596] env[62820]: _type = "Task" [ 1559.861596] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.869673] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfffeac7-2f3c-4099-a466-d8f752c3807a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.881136] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695742, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.166695] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.305490] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.306088] env[62820]: DEBUG nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1560.308987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.755s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.310428] env[62820]: INFO nova.compute.claims [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.346024] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4ab0bb5c-259d-4419-9c7d-ed3086efdcb1] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1560.373460] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695742, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.684252] env[62820]: DEBUG nova.network.neutron [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updated VIF entry in instance network info cache for port 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1560.684631] env[62820]: DEBUG nova.network.neutron [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.718190] env[62820]: DEBUG nova.network.neutron [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updating instance_info_cache with network_info: [{"id": "8ba6813f-c30f-416d-b888-4a33a10698ef", "address": "fa:16:3e:0d:dc:a3", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6813f-c3", "ovs_interfaceid": "8ba6813f-c30f-416d-b888-4a33a10698ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.821059] env[62820]: DEBUG nova.compute.utils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1560.822768] env[62820]: DEBUG 
nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1560.822943] env[62820]: DEBUG nova.network.neutron [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1560.848675] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: d040f935-566b-4bbe-b9f6-379fd1dc1a91] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1560.873390] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695742, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.923098] env[62820]: DEBUG nova.policy [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1560.972871] env[62820]: DEBUG nova.network.neutron [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Successfully updated port: 09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1561.188962] env[62820]: DEBUG oslo_concurrency.lockutils [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.189404] env[62820]: DEBUG nova.compute.manager [req-94880200-1682-4d0f-a4ef-6b5925c0e88a req-fc18816c-9176-468e-ae58-d960e2ece3d5 service nova] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Received event network-vif-deleted-4a487d2d-4f2d-43bd-9691-dd7219d7b997 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1561.222443] env[62820]: DEBUG oslo_concurrency.lockutils [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.230106] env[62820]: DEBUG nova.objects.instance [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 
tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'flavor' on Instance uuid eafe98b7-a67d-4bab-bfc0-8367ae069d31 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1561.262619] env[62820]: DEBUG nova.network.neutron [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Successfully created port: e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1561.328475] env[62820]: DEBUG nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1561.352472] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9068670d-f323-4180-92f9-f19737e955e2] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1561.382827] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695742, 'name': CreateVM_Task, 'duration_secs': 1.482202} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.386466] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.390020] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.390020] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.390020] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1561.390020] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34585f9d-45ba-4906-9209-98bc575352a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.395320] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1561.395320] 
env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522f9a6a-8477-4648-06f7-0d1d96003e93" [ 1561.395320] env[62820]: _type = "Task" [ 1561.395320] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.407216] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522f9a6a-8477-4648-06f7-0d1d96003e93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.481370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "refresh_cache-a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.481370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "refresh_cache-a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.481370] env[62820]: DEBUG nova.network.neutron [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1561.700085] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9c3472-5559-43fb-aea4-3b26466cba48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.708469] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efb958b-309b-4448-b5b3-62737b26e7aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.743403] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48cd7f1-23ba-4938-a1c5-5dfae307b508 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.747155] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115428d0-812b-4fa7-af9f-ed071f0c397d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.771195] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26ffc06-ca28-46ad-92ac-9a20b8d79199 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.775653] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering off 
the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1561.775949] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64e3c842-acf1-442d-906b-6fbda7db5aa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.789274] env[62820]: DEBUG nova.compute.provider_tree [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.792409] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1561.792409] env[62820]: value = "task-1695743" [ 1561.792409] env[62820]: _type = "Task" [ 1561.792409] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.858685] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: fdc57b8b-a6ab-4e6d-9db0-4054b022aeec] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1561.908500] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522f9a6a-8477-4648-06f7-0d1d96003e93, 'name': SearchDatastore_Task, 'duration_secs': 0.02664} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.908815] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.908987] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1561.909278] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.909440] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.909895] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1561.909895] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1df0f5d0-9178-4767-a8fd-45c9182e99e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.919817] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1561.920011] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1561.920836] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b19b2c0f-5561-4c29-8960-4bbaf1fe9fcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.928845] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1561.928845] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5258a174-762b-1837-cd42-037cdef258c5" [ 1561.928845] env[62820]: _type = "Task" [ 1561.928845] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.939278] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5258a174-762b-1837-cd42-037cdef258c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.943192] env[62820]: DEBUG nova.compute.manager [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Received event network-vif-plugged-09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1561.943461] env[62820]: DEBUG oslo_concurrency.lockutils [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] Acquiring lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.944781] env[62820]: DEBUG oslo_concurrency.lockutils [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.944781] env[62820]: DEBUG oslo_concurrency.lockutils [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.944781] env[62820]: DEBUG nova.compute.manager [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] No waiting events found dispatching network-vif-plugged-09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1561.944781] env[62820]: WARNING nova.compute.manager [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Received unexpected event 
network-vif-plugged-09d8d39a-ead7-4dd9-b773-1927cdffde6f for instance with vm_state building and task_state spawning. [ 1561.944781] env[62820]: DEBUG nova.compute.manager [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Received event network-changed-09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1561.944781] env[62820]: DEBUG nova.compute.manager [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Refreshing instance network info cache due to event network-changed-09d8d39a-ead7-4dd9-b773-1927cdffde6f. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1561.945208] env[62820]: DEBUG oslo_concurrency.lockutils [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] Acquiring lock "refresh_cache-a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.021285] env[62820]: DEBUG nova.network.neutron [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1562.214504] env[62820]: DEBUG nova.network.neutron [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Updating instance_info_cache with network_info: [{"id": "09d8d39a-ead7-4dd9-b773-1927cdffde6f", "address": "fa:16:3e:6e:99:ed", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09d8d39a-ea", "ovs_interfaceid": "09d8d39a-ead7-4dd9-b773-1927cdffde6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.292504] env[62820]: DEBUG nova.scheduler.client.report [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1562.304774] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695743, 'name': PowerOffVM_Task, 'duration_secs': 0.288446} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.305067] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1562.310178] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Reconfiguring VM instance instance-00000037 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1562.310946] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c6b546f-3b26-40a3-9466-5548a537ac3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.329953] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1562.329953] env[62820]: value = "task-1695744" [ 1562.329953] env[62820]: _type = "Task" [ 1562.329953] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.338570] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695744, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.340735] env[62820]: DEBUG nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1562.364929] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 766dd26e-3866-4ef3-bd87-b81e5f6bc718] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1562.369343] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1562.369571] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1562.369757] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1562.369913] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1562.370076] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1562.370446] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1562.370446] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1562.370580] env[62820]: DEBUG nova.virt.hardware [None 
req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1562.370739] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1562.370898] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1562.371078] env[62820]: DEBUG nova.virt.hardware [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1562.371917] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e37991-2d9e-4294-929f-7c449d6de373 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.380608] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16cb4f8-429a-466e-8f67-78b29a94167a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.438455] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5258a174-762b-1837-cd42-037cdef258c5, 'name': SearchDatastore_Task, 'duration_secs': 0.014623} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.439270] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b06851c8-e307-42a9-bd77-4091a2839bcc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.444870] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1562.444870] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bc2332-5242-3ae1-f64d-8fb453f3f7b2" [ 1562.444870] env[62820]: _type = "Task" [ 1562.444870] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.452794] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bc2332-5242-3ae1-f64d-8fb453f3f7b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.717030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "refresh_cache-a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.717372] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Instance network_info: |[{"id": "09d8d39a-ead7-4dd9-b773-1927cdffde6f", "address": "fa:16:3e:6e:99:ed", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09d8d39a-ea", "ovs_interfaceid": "09d8d39a-ead7-4dd9-b773-1927cdffde6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1562.717680] env[62820]: DEBUG oslo_concurrency.lockutils [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] Acquired lock "refresh_cache-a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.717861] env[62820]: DEBUG nova.network.neutron [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Refreshing network info cache for port 09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.719105] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:99:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09d8d39a-ead7-4dd9-b773-1927cdffde6f', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1562.726747] env[62820]: DEBUG oslo.service.loopingcall [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 
tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1562.727784] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1562.728036] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f712b86-0038-4ad8-8bd7-57c66d0ed530 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.750571] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1562.750571] env[62820]: value = "task-1695745" [ 1562.750571] env[62820]: _type = "Task" [ 1562.750571] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.763102] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695745, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.800487] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.801168] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1562.803936] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.316s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.804171] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.806606] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.935s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.806794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.809142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.051s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.810555] env[62820]: INFO nova.compute.claims [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.843090] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695744, 'name': ReconfigVM_Task, 'duration_secs': 0.23896} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.843090] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Reconfigured VM instance instance-00000037 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1562.843090] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1562.844254] env[62820]: INFO nova.scheduler.client.report [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleted allocations for instance ab21fd61-3a44-42fa-92be-51214b0a9a1e [ 1562.845501] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66aecd2b-24de-4f2e-8fa8-fad1cec70fa6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.851083] env[62820]: INFO nova.scheduler.client.report [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Deleted allocations for instance 7e4596bf-a8b0-4502-b80b-da372d1fba06 [ 1562.861155] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1562.861155] env[62820]: value = "task-1695746" [ 1562.861155] env[62820]: _type = "Task" [ 1562.861155] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.871238] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695746, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.875244] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0eb62424-0ee6-4ff4-94c2-bb6a10861759] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1562.956530] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bc2332-5242-3ae1-f64d-8fb453f3f7b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010979} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.956953] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.957367] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 210277a2-dd10-4e08-8627-4b025a554410/210277a2-dd10-4e08-8627-4b025a554410.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1562.957778] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81659bd2-674a-4d38-ac31-e75b7f840cd5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.967266] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1562.967266] env[62820]: value = "task-1695747" [ 1562.967266] env[62820]: _type = "Task" [ 1562.967266] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.967431] env[62820]: DEBUG nova.network.neutron [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Successfully updated port: e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1562.978628] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.268397] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695745, 'name': CreateVM_Task, 'duration_secs': 0.397865} completed successfully. 
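Every vCenter call that returns a Task object (CreateVM_Task here, the copy/extend/reconfigure/power tasks elsewhere in this stretch) is awaited the same way: the task state is re-read at a fixed interval, logging "progress is N%" until it flips to success or error. A small sketch of that poll-until-done shape, using oslo.service's fixed-interval loop and a stand-in fetch_task_info() in place of the real vSphere TaskInfo read:

```python
# Sketch of the poll loop behind the repeated "_poll_task ... progress is N%"
# lines. fetch_task_info() is a hypothetical stub standing in for reading the
# Task managed object's state/progress from vCenter.
from oslo_service import loopingcall


def fetch_task_info(task_ref):
    # Stub: pretend the task finished on the first poll.
    return {"state": "success", "progress": 100}


def wait_for_task(task_ref, interval=0.5):
    def _poll():
        info = fetch_task_info(task_ref)
        if info["state"] == "success":
            # Stops the loop and hands the value back to wait().
            raise loopingcall.LoopingCallDone(info)
        if info["state"] == "error":
            raise RuntimeError(f"task {task_ref} failed")
        # Otherwise fall through; the loop calls _poll again after `interval`.

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()


print(wait_for_task("task-1695745"))
```

Raising LoopingCallDone hands the final task info back to wait(), which is what lets callers treat an asynchronous vCenter task as a blocking call.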
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.269788] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1563.270987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.271354] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.271873] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1563.272376] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-976120e4-1b75-4fb9-bbcf-88746933268c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.281321] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1563.281321] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524d0aef-0191-e596-0538-b52705a6c8e5" [ 1563.281321] env[62820]: _type = "Task" [ 1563.281321] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.294181] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524d0aef-0191-e596-0538-b52705a6c8e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.317958] env[62820]: DEBUG nova.compute.utils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1563.322856] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1563.323223] env[62820]: DEBUG nova.network.neutron [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1563.360369] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d4015f-58d4-47fc-b2e2-d114789f020c tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "ab21fd61-3a44-42fa-92be-51214b0a9a1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.095s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.363061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b3dbf6f3-52c7-46cf-adde-29d40846d476 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "7e4596bf-a8b0-4502-b80b-da372d1fba06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.437s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.378761] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695746, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.382608] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 519c961c-557e-4796-88da-047c55d6be44] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1563.413169] env[62820]: DEBUG nova.policy [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b328ffc83d344899fcbbb6e9ade1698', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bdc42fe98fb43d7bd92e2dd789aff93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1563.475740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.475939] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.476169] env[62820]: DEBUG 
nova.network.neutron [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1563.488802] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695747, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.626742] env[62820]: DEBUG nova.network.neutron [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Updated VIF entry in instance network info cache for port 09d8d39a-ead7-4dd9-b773-1927cdffde6f. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1563.627264] env[62820]: DEBUG nova.network.neutron [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Updating instance_info_cache with network_info: [{"id": "09d8d39a-ead7-4dd9-b773-1927cdffde6f", "address": "fa:16:3e:6e:99:ed", "network": {"id": "d1a61e54-3e55-4e0b-8fb7-03c3821865e0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-529639739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1730db17199844cd8833f1176d249b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09d8d39a-ea", "ovs_interfaceid": "09d8d39a-ead7-4dd9-b773-1927cdffde6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.699019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.699309] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.699525] 
env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.699698] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.699865] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.701865] env[62820]: INFO nova.compute.manager [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Terminating instance [ 1563.794161] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524d0aef-0191-e596-0538-b52705a6c8e5, 'name': SearchDatastore_Task, 'duration_secs': 0.060865} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.794622] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.795428] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1563.795813] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.795974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.796188] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1563.796457] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94a1128b-5272-4a1c-a56c-4b93ae27d617 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.806693] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1563.806913] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1563.807540] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e15b3be5-20e6-4bf9-bd2d-11aa9ea72683 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.815456] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1563.815456] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528eb44d-634e-8341-76ef-37401211a72d" [ 1563.815456] env[62820]: _type = "Task" [ 1563.815456] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.826769] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528eb44d-634e-8341-76ef-37401211a72d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.829349] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1563.880358] env[62820]: DEBUG oslo_vmware.api [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695746, 'name': PowerOnVM_Task, 'duration_secs': 0.601911} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.880358] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1563.880358] env[62820]: DEBUG nova.compute.manager [None req-83c43a40-b081-483f-b1f3-5bde9716a1b3 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1563.880358] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b7c081-a88c-4180-bd46-0701e529bb8a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.887901] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: cc2b0ed5-b711-487d-8bfc-ee2745c9ef89] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1563.903632] env[62820]: DEBUG nova.network.neutron [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Successfully created port: d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.990397] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637118} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.990664] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 210277a2-dd10-4e08-8627-4b025a554410/210277a2-dd10-4e08-8627-4b025a554410.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1563.990878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1563.991156] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fecaf51e-52eb-4723-b641-ef6694ee5a24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.998841] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1563.998841] env[62820]: value = "task-1695749" [ 1563.998841] env[62820]: _type = "Task" [ 1563.998841] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.010633] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.039634] env[62820]: DEBUG nova.network.neutron [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Instance cache missing network info. 
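The copy/extend pair above is the usual image-backed spawn path: the cached vmdk is copied out of devstack-image-cache_base into the instance directory, then the root disk is extended to the flavor size. The extend target of 1048576 appears to be the root size expressed in KiB, i.e. a 1 GiB root disk (root_gb=1); a quick check of that unit conversion:

```python
# "Extending root virtual disk to 1048576": the target looks like the
# flavor's root_gb expressed in KiB, the unit ExtendVirtualDisk works in.
root_gb = 1
target_kib = root_gb * 1024 * 1024
assert target_kib == 1048576
print(f"{root_gb} GiB root disk -> {target_kib} KiB")
```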
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1564.133138] env[62820]: DEBUG oslo_concurrency.lockutils [req-26c2b14c-11ac-45a5-8096-441a25e374ff req-c1c8cfb2-d474-49b4-a48a-799b0fc9322e service nova] Releasing lock "refresh_cache-a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.151487] env[62820]: DEBUG nova.compute.manager [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-vif-plugged-e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1564.151860] env[62820]: DEBUG oslo_concurrency.lockutils [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.151938] env[62820]: DEBUG oslo_concurrency.lockutils [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.152080] env[62820]: DEBUG oslo_concurrency.lockutils [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.152256] env[62820]: DEBUG nova.compute.manager [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] No waiting events found dispatching network-vif-plugged-e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1564.152404] env[62820]: WARNING nova.compute.manager [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received unexpected event network-vif-plugged-e8df5c6d-470d-4740-947e-1652ee33a75f for instance with vm_state building and task_state spawning. [ 1564.152559] env[62820]: DEBUG nova.compute.manager [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-changed-e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1564.152711] env[62820]: DEBUG nova.compute.manager [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing instance network info cache due to event network-changed-e8df5c6d-470d-4740-947e-1652ee33a75f. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1564.152874] env[62820]: DEBUG oslo_concurrency.lockutils [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1564.205095] env[62820]: DEBUG nova.compute.manager [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1564.205554] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1564.206366] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e180ad-6007-44b7-9732-e93896376390 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.220039] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1564.220039] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-beeb733a-6c50-4880-8510-c976b898162e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.229830] env[62820]: DEBUG oslo_vmware.api [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1564.229830] env[62820]: value = "task-1695750" [ 1564.229830] env[62820]: _type = "Task" [ 1564.229830] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.251154] env[62820]: DEBUG oslo_vmware.api [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695750, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.331089] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528eb44d-634e-8341-76ef-37401211a72d, 'name': SearchDatastore_Task, 'duration_secs': 0.017487} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.333631] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074d85f0-dda4-4d56-9935-fd4a670b709f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.336350] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7174e698-2817-41bc-83f9-1e2229afd287 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.348557] env[62820]: DEBUG nova.network.neutron [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.354637] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1564.354637] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529b784a-bb64-d875-ac92-c83a8f986101" [ 1564.354637] env[62820]: _type = "Task" [ 1564.354637] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.355903] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a49b11-4678-4192-9474-18fc0a4a93c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.402944] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529b784a-bb64-d875-ac92-c83a8f986101, 'name': SearchDatastore_Task, 'duration_secs': 0.016714} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.403398] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4e4668ed-801a-4105-8b9e-cf37be91c8b8] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1564.409476] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.409476] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a150a0d8-afcc-4a5b-a014-2c25a9bc4f07/a150a0d8-afcc-4a5b-a014-2c25a9bc4f07.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1564.411843] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13775b0f-4a19-4f8c-81ca-6bf03d4c79e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.417390] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-778fc19b-274b-4114-9713-c0c89eb9af87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.431222] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea9f1db-9226-4199-b13c-e18055cc1c69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.436182] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1564.436182] env[62820]: value = "task-1695751" [ 1564.436182] env[62820]: _type = "Task" [ 1564.436182] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.463569] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695751, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.465745] env[62820]: DEBUG nova.compute.provider_tree [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.511411] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.301141} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.512525] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.512525] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c70ce1b-f058-4c4f-9b03-42236b69a4f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.535214] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 210277a2-dd10-4e08-8627-4b025a554410/210277a2-dd10-4e08-8627-4b025a554410.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.535589] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17e295e0-29e4-41d0-8f33-11df4f121457 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.559247] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1564.559247] env[62820]: value = "task-1695752" [ 1564.559247] env[62820]: _type = "Task" [ 1564.559247] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.571892] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695752, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.741259] env[62820]: DEBUG oslo_vmware.api [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695750, 'name': PowerOffVM_Task, 'duration_secs': 0.260566} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.741259] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1564.741396] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1564.741629] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbb827c0-cd2d-48b4-8ffa-c66788e3ed7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.847020] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1564.855944] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.856582] env[62820]: DEBUG nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Instance network_info: |[{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1564.856740] env[62820]: DEBUG oslo_concurrency.lockutils [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] Acquired lock 
"refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.857199] env[62820]: DEBUG nova.network.neutron [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing network info cache for port e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1564.858387] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:ba:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8df5c6d-470d-4740-947e-1652ee33a75f', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1564.869190] env[62820]: DEBUG oslo.service.loopingcall [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1564.869381] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1564.870191] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-228de313-f0ad-4c7d-9237-15b243f2cfd5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.889084] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1564.889287] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1564.889437] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleting the datastore file [datastore1] 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1564.890380] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e88a6f44-b688-49f2-b6e4-a45d1edd07ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.896125] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 
tempest-ImagesTestJSON-108607288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e50c3b19829f26fb844c4880ad27b508',container_format='bare',created_at=2024-12-10T16:51:25Z,direct_url=,disk_format='vmdk',id=fb30f87e-9d83-41a3-a17f-e897695c418d,min_disk=1,min_ram=0,name='tempest-test-snap-896447348',owner='6bdc42fe98fb43d7bd92e2dd789aff93',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-10T16:51:40Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1564.896457] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1564.896670] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1564.897018] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1564.897208] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1564.897370] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1564.897593] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1564.897765] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1564.898043] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1564.898292] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 
tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1564.898568] env[62820]: DEBUG nova.virt.hardware [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1564.900176] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac81671a-a36f-43e2-b465-14b84cb1df09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.906124] env[62820]: DEBUG oslo_vmware.api [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for the task: (returnval){ [ 1564.906124] env[62820]: value = "task-1695754" [ 1564.906124] env[62820]: _type = "Task" [ 1564.906124] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.906333] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1564.906333] env[62820]: value = "task-1695755" [ 1564.906333] env[62820]: _type = "Task" [ 1564.906333] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.914416] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 6176f083-b61a-40d6-90a0-680b628a1e08] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1564.923770] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e2c239-23d8-49ba-9f5a-59aa0819f21c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.930659] env[62820]: DEBUG oslo_vmware.api [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695754, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.935023] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695755, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.962377] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695751, 'name': CopyVirtualDisk_Task} progress is 25%. 
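The nova.virt.hardware lines above walk from the flavor (1 vCPU, no explicit limits or preferences) to the single viable topology 1:1:1. A simplified stand-in for that enumeration step, which just lists every sockets*cores*threads split of the vCPU count within the given maximums; Nova's real code additionally applies flavor/image preferences and sorting, which are omitted here:

```python
# Simplified sketch of the "Build topologies for N vcpu(s)" step: list every
# (sockets, cores, threads) factorisation of the vCPU count that stays within
# the maximums. For 1 vCPU the only split is 1:1:1, matching the log.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))   # [(1, 1, 1)]
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...
```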
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.970046] env[62820]: DEBUG nova.scheduler.client.report [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1565.071606] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695752, 'name': ReconfigVM_Task, 'duration_secs': 0.338047} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.072033] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 210277a2-dd10-4e08-8627-4b025a554410/210277a2-dd10-4e08-8627-4b025a554410.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1565.072699] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54ce51c2-cded-4342-baa9-f7855318b46d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.082479] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1565.082479] env[62820]: value = "task-1695756" [ 1565.082479] env[62820]: _type = "Task" [ 1565.082479] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.092055] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695756, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.369971] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.369971] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.422383] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 15e95a20-2729-46c6-a613-32aa353ed329] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1565.424144] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695755, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.428755] env[62820]: DEBUG oslo_vmware.api [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Task: {'id': task-1695754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429268} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.431215] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1565.431414] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1565.431589] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1565.431759] env[62820]: INFO nova.compute.manager [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Took 1.23 seconds to destroy the instance on the hypervisor. 
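The DeleteDatastoreFile_Task and CopyVirtualDisk_Task records above follow oslo.vmware's submit-then-poll pattern: the driver invokes a vCenter task method, gets back a task reference immediately, and wait_for_task produces the "_poll_task ... progress is N%" lines until the task completes. A minimal sketch of that pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession named session; the datacenter ref and file path are illustrative placeholders, not values taken from this log:

# Sketch of the submit-then-poll pattern seen in the surrounding records.
# `session` is assumed to be an authenticated oslo_vmware.api.VMwareAPISession;
# `file_path` and `dc_ref` are placeholders for a datastore path and datacenter ref.
from oslo_vmware import exceptions as vexc

def delete_datastore_file(session, file_path, dc_ref):
    # DeleteDatastoreFile_Task returns a task reference right away.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=file_path, datacenter=dc_ref)
    try:
        # wait_for_task polls the task (the "progress is N%" debug lines)
        # and returns only once vCenter reports success, else it raises.
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # Treat an already-missing file as success, as the cleanup path above does.
        pass
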
[ 1565.431997] env[62820]: DEBUG oslo.service.loopingcall [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1565.436430] env[62820]: DEBUG nova.compute.manager [-] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1565.436430] env[62820]: DEBUG nova.network.neutron [-] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1565.454300] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687803} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.454300] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a150a0d8-afcc-4a5b-a014-2c25a9bc4f07/a150a0d8-afcc-4a5b-a014-2c25a9bc4f07.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1565.454300] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1565.454300] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77eee981-2eaa-4f15-83ee-d6e998a12ca7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.462678] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1565.462678] env[62820]: value = "task-1695757" [ 1565.462678] env[62820]: _type = "Task" [ 1565.462678] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.475440] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.475810] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1565.482261] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.519s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.483486] env[62820]: INFO nova.compute.claims [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1565.492153] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.593999] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695756, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.918508] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695755, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.929294] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 93098210-ca91-41b4-9b12-96fa105a2ab3] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1565.936758] env[62820]: DEBUG nova.network.neutron [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updated VIF entry in instance network info cache for port e8df5c6d-470d-4740-947e-1652ee33a75f. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1565.937211] env[62820]: DEBUG nova.network.neutron [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.938702] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.938761] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.942026] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "a06d736c-a704-46e8-a6f7-85d8be40804f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.942026] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.942026] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.943336] env[62820]: INFO nova.compute.manager [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Terminating instance [ 1565.979023] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082929} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.979322] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1565.980150] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd0f591-7300-449f-a720-aba2fc0bc462 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.987938] env[62820]: DEBUG nova.compute.utils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.989882] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1565.990028] env[62820]: DEBUG nova.network.neutron [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1566.016794] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] a150a0d8-afcc-4a5b-a014-2c25a9bc4f07/a150a0d8-afcc-4a5b-a014-2c25a9bc4f07.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1566.017436] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-525d4147-5cbe-4f64-8c90-97940f5ea364 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.041782] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1566.041782] env[62820]: value = "task-1695758" [ 1566.041782] env[62820]: _type = "Task" [ 1566.041782] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.054322] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695758, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.095594] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695756, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.114833] env[62820]: DEBUG nova.policy [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3315ddc593dd40d3bc97ab71be7c802e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '573f482dc303432aba8d20980da241ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1566.228669] env[62820]: DEBUG nova.compute.manager [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Received event network-vif-plugged-d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1566.229036] env[62820]: DEBUG oslo_concurrency.lockutils [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] Acquiring lock "93e1a842-d598-4798-88ad-622ae5dbf057-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.229338] env[62820]: DEBUG oslo_concurrency.lockutils [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] Lock "93e1a842-d598-4798-88ad-622ae5dbf057-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.229619] env[62820]: DEBUG oslo_concurrency.lockutils [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] Lock "93e1a842-d598-4798-88ad-622ae5dbf057-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.229876] env[62820]: DEBUG nova.compute.manager [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] No waiting events found dispatching network-vif-plugged-d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1566.230102] env[62820]: WARNING nova.compute.manager [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Received unexpected event network-vif-plugged-d98a1f49-60e3-4537-b8fd-4994472afa94 for instance with vm_state building and task_state spawning. 
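The lock lines above with targets such as "pop_instance_event.._pop_event" come from oslo.concurrency's lockutils: Nova serializes per-instance event handling by decorating a nested function with a lock named after the instance, and the decorator's wrapper emits the "Acquiring"/"acquired"/"released" debug lines with the waited/held timings. A minimal sketch of that pattern, with the helper names and event store invented for illustration; only lockutils.synchronized is the real oslo.concurrency API:

# Sketch of the dynamic per-instance locking pattern behind the
# '<uuid>-events' lock records above. `events` is an illustrative dict of
# pending events keyed by instance UUID.
from oslo_concurrency import lockutils

def pop_instance_event(events, instance_uuid, event_name):
    # Decorating a nested function lets the lock name depend on the instance;
    # the decorator's wrapper logs the acquire/release pairs seen in the log.
    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        return events.get(instance_uuid, {}).pop(event_name, None)

    return _pop_event()
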
[ 1566.230309] env[62820]: DEBUG nova.compute.manager [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Received event network-vif-deleted-337d3329-4826-4d1a-a659-b6ce135f8b94 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1566.230567] env[62820]: INFO nova.compute.manager [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Neutron deleted interface 337d3329-4826-4d1a-a659-b6ce135f8b94; detaching it from the instance and deleting it from the info cache [ 1566.230689] env[62820]: DEBUG nova.network.neutron [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.315378] env[62820]: DEBUG nova.network.neutron [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Successfully updated port: d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1566.419767] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695755, 'name': CreateVM_Task, 'duration_secs': 1.500415} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.420141] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1566.421908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.421908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.421908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1566.423298] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7ef7a14-a42c-4a5a-81fc-9f12aff254ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.431015] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1566.431015] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f58d98-daef-5c78-7222-649edaaaa869" [ 1566.431015] env[62820]: _type = "Task" [ 1566.431015] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.439757] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f58d98-daef-5c78-7222-649edaaaa869, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.443306] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 42d00bd3-71fa-4c26-a544-489326163d88] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1566.445493] env[62820]: DEBUG oslo_concurrency.lockutils [req-7830673f-84a2-4968-aecd-1aac80f6b394 req-730f4226-5201-491a-815e-4804ccab50f6 service nova] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.447470] env[62820]: DEBUG nova.compute.manager [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1566.448325] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1566.449044] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe073e07-0194-463e-af30-3d0ab9ff1cac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.456347] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.456586] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0398c7e-0414-4009-9cfa-b200a1acdf3a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.463819] env[62820]: DEBUG oslo_vmware.api [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1566.463819] env[62820]: value = "task-1695759" [ 1566.463819] env[62820]: _type = "Task" [ 1566.463819] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.485894] env[62820]: DEBUG oslo_vmware.api [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695759, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.495435] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1566.556460] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.565546] env[62820]: DEBUG nova.network.neutron [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Successfully created port: f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.576596] env[62820]: DEBUG nova.network.neutron [-] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.600164] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695756, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.733562] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0727bd83-4826-4f89-986b-6ee940ff1abd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.747110] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea5a899-a430-4194-87cc-54fcb77e3c43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.788398] env[62820]: DEBUG nova.compute.manager [req-5d2603e9-b252-4d52-8ace-24ab91682f3f req-acb4dd4b-779c-4d22-b244-cb471e7a3c15 service nova] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Detach interface failed, port_id=337d3329-4826-4d1a-a659-b6ce135f8b94, reason: Instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1566.819139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "refresh_cache-93e1a842-d598-4798-88ad-622ae5dbf057" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.819346] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "refresh_cache-93e1a842-d598-4798-88ad-622ae5dbf057" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.819500] env[62820]: DEBUG nova.network.neutron [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1566.937150] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08915a5-e5fa-4b10-9288-95f51b84e62b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.948184] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80150aec-0fac-481b-b75f-81090aefcbd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.953572] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7879cf9e-e8ec-45b8-8d1c-fe5595d6eb65] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1566.953788] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f58d98-daef-5c78-7222-649edaaaa869, 'name': SearchDatastore_Task, 'duration_secs': 0.038228} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.956326] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.956326] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1566.956326] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.956326] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.956326] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1566.956326] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bcfad76-f5a9-4964-962f-7e0d086bbc98 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.992327] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3fb132-8d70-4cc3-a108-7ef8bfc50d63 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.995075] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1566.995343] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1566.996854] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e102ccec-68a0-49cf-8e46-ef08632b5219 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.009946] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edab2c7-1784-4468-a350-59448143e68e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.017895] env[62820]: DEBUG oslo_vmware.api [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695759, 'name': PowerOffVM_Task, 'duration_secs': 0.231902} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.018926] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1567.018926] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526d71f6-c05e-2878-a830-58829c76a740" [ 1567.018926] env[62820]: _type = "Task" [ 1567.018926] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.018926] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1567.019122] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1567.020333] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b77dae6-2f3e-4712-bfcf-0044cbbec933 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.033751] env[62820]: DEBUG nova.compute.provider_tree [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1567.044339] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': 
session[5263da33-e147-45e9-71e6-fd449b37f057]526d71f6-c05e-2878-a830-58829c76a740, 'name': SearchDatastore_Task, 'duration_secs': 0.035642} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.049095] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-872b7747-90d6-46db-b6db-0fdfcaa75f1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.063386] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1567.063386] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52dee6f9-9282-9db3-02f2-f1bdf4c0be5d" [ 1567.063386] env[62820]: _type = "Task" [ 1567.063386] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.067588] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695758, 'name': ReconfigVM_Task, 'duration_secs': 0.678617} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.071204] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Reconfigured VM instance instance-0000003d to attach disk [datastore1] a150a0d8-afcc-4a5b-a014-2c25a9bc4f07/a150a0d8-afcc-4a5b-a014-2c25a9bc4f07.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1567.072099] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d289d62-7ce8-490b-ab2b-a59b8e098a4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.081118] env[62820]: INFO nova.compute.manager [-] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Took 1.65 seconds to deallocate network for instance. [ 1567.081118] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52dee6f9-9282-9db3-02f2-f1bdf4c0be5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.084497] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1567.084497] env[62820]: value = "task-1695761" [ 1567.084497] env[62820]: _type = "Task" [ 1567.084497] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.101232] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695756, 'name': Rename_Task, 'duration_secs': 1.881806} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.105224] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1567.105292] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695761, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.105986] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c294b2c-640d-4ff8-99fa-b4a351a3a6d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.113855] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1567.113855] env[62820]: value = "task-1695762" [ 1567.113855] env[62820]: _type = "Task" [ 1567.113855] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.122564] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695762, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.221803] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1567.222277] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1567.222277] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Deleting the datastore file [datastore1] a06d736c-a704-46e8-a6f7-85d8be40804f {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.222479] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b161f51e-cc1b-4ff0-9f65-84c9427f104a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.229607] env[62820]: DEBUG oslo_vmware.api [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for the task: (returnval){ [ 1567.229607] env[62820]: value = "task-1695763" [ 1567.229607] env[62820]: _type = "Task" [ 1567.229607] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.238131] env[62820]: DEBUG oslo_vmware.api [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695763, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.384434] env[62820]: DEBUG nova.network.neutron [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1567.455598] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: f2658dfa-baed-4ff3-8c7e-733bbcf1916e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1567.520621] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1567.554998] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1567.555356] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1567.555615] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1567.555935] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1567.556213] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1567.556478] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1567.556776] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1567.556998] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1567.557209] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1567.557406] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1567.557687] env[62820]: DEBUG nova.virt.hardware [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1567.559071] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098349fc-404a-439f-91d1-f9e8b8d8f2b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.564707] env[62820]: ERROR nova.scheduler.client.report [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [req-7bd527fb-aa39-419f-9dde-d567df4bbb23] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7bd527fb-aa39-419f-9dde-d567df4bbb23"}]} [ 1567.578026] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa824065-3916-4f79-84b3-0c96152b32bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.584326] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52dee6f9-9282-9db3-02f2-f1bdf4c0be5d, 'name': SearchDatastore_Task, 'duration_secs': 0.038033} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.586402] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.586402] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 11843b38-3ce4-42a7-b855-a9d0b473e796/11843b38-3ce4-42a7-b855-a9d0b473e796.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1567.586402] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80a9e778-c29c-494d-a2d0-43b5e46bace1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.592032] env[62820]: DEBUG nova.scheduler.client.report [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1567.594666] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.607192] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1567.607192] env[62820]: value = "task-1695764" [ 1567.607192] env[62820]: _type = "Task" [ 1567.607192] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.611019] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695761, 'name': Rename_Task, 'duration_secs': 0.271085} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.614301] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1567.614616] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0da5446-a025-4bb6-8042-ff373299b562 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.620203] env[62820]: DEBUG nova.scheduler.client.report [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1567.620417] env[62820]: DEBUG nova.compute.provider_tree [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1567.626191] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695764, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.627499] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1567.627499] env[62820]: value = "task-1695765" [ 1567.627499] env[62820]: _type = "Task" [ 1567.627499] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.630395] env[62820]: DEBUG oslo_vmware.api [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1695762, 'name': PowerOnVM_Task, 'duration_secs': 0.507711} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.633315] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1567.633524] env[62820]: INFO nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 10.29 seconds to spawn the instance on the hypervisor. [ 1567.633771] env[62820]: DEBUG nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1567.634732] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace410cf-f3ce-4130-a31d-7fe7d15e985b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.638410] env[62820]: DEBUG nova.scheduler.client.report [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1567.650952] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695765, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.662542] env[62820]: DEBUG nova.scheduler.client.report [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1567.713487] env[62820]: DEBUG nova.network.neutron [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Updating instance_info_cache with network_info: [{"id": "d98a1f49-60e3-4537-b8fd-4994472afa94", "address": "fa:16:3e:c8:9d:97", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd98a1f49-60", "ovs_interfaceid": "d98a1f49-60e3-4537-b8fd-4994472afa94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.742013] env[62820]: DEBUG oslo_vmware.api [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Task: {'id': task-1695763, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227697} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.744958] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.745191] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1567.745502] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1567.745599] env[62820]: INFO nova.compute.manager [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1567.745871] env[62820]: DEBUG oslo.service.loopingcall [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.746275] env[62820]: DEBUG nova.compute.manager [-] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1567.746375] env[62820]: DEBUG nova.network.neutron [-] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1567.959289] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 846e8df9-b925-4d2e-a90e-4e774c35d0b4] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1568.093743] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbf2f96-c221-458c-8a73-125da731a1ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.103178] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c46116-4a92-4020-97b3-2abf21de47e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.144028] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cd14c2-cfaf-4bec-93ec-8fa6dd8f5c35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.150584] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695764, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.162801] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695765, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.170539] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2aace1-a708-4890-9ba5-e820328cd259 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.174044] env[62820]: INFO nova.compute.manager [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 58.94 seconds to build instance. 
[ 1568.192200] env[62820]: DEBUG nova.compute.provider_tree [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1568.218978] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "refresh_cache-93e1a842-d598-4798-88ad-622ae5dbf057" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.222018] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Instance network_info: |[{"id": "d98a1f49-60e3-4537-b8fd-4994472afa94", "address": "fa:16:3e:c8:9d:97", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd98a1f49-60", "ovs_interfaceid": "d98a1f49-60e3-4537-b8fd-4994472afa94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1568.222018] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:9d:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd98a1f49-60e3-4537-b8fd-4994472afa94', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1568.228230] env[62820]: DEBUG oslo.service.loopingcall [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.228894] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1568.229270] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ef3ad04-f034-451e-885f-c36e17075c2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.255238] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1568.255238] env[62820]: value = "task-1695766" [ 1568.255238] env[62820]: _type = "Task" [ 1568.255238] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.265396] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695766, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.462589] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9287b8eb-487d-4f51-9e7c-90c016a1c8e2] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1568.624025] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666453} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.624025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 11843b38-3ce4-42a7-b855-a9d0b473e796/11843b38-3ce4-42a7-b855-a9d0b473e796.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1568.624025] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1568.624025] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfa78c87-5331-4d6a-8c5a-4428aa6d4426 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.632306] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1568.632306] env[62820]: value = "task-1695767" [ 1568.632306] env[62820]: _type = "Task" [ 1568.632306] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.646147] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.653247] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695765, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.677745] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ae21f31-8b00-4650-bffa-699d9f634e3f tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.989s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.746440] env[62820]: DEBUG nova.scheduler.client.report [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1568.746716] env[62820]: DEBUG nova.compute.provider_tree [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 94 to 95 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1568.746895] env[62820]: DEBUG nova.compute.provider_tree [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1568.766224] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695766, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.831487] env[62820]: DEBUG nova.network.neutron [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Successfully updated port: f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1568.959393] env[62820]: DEBUG nova.compute.manager [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Received event network-changed-d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1568.959685] env[62820]: DEBUG nova.compute.manager [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Refreshing instance network info cache due to event network-changed-d98a1f49-60e3-4537-b8fd-4994472afa94. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1568.959894] env[62820]: DEBUG oslo_concurrency.lockutils [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] Acquiring lock "refresh_cache-93e1a842-d598-4798-88ad-622ae5dbf057" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.960055] env[62820]: DEBUG oslo_concurrency.lockutils [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] Acquired lock "refresh_cache-93e1a842-d598-4798-88ad-622ae5dbf057" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.960492] env[62820]: DEBUG nova.network.neutron [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Refreshing network info cache for port d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1568.965641] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9910a0ea-5ce0-41e9-b449-da729a4c3223] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1569.112189] env[62820]: DEBUG nova.network.neutron [-] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.149026] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077373} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.149993] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1569.150993] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2845a4-1cf3-4f66-b9ae-dbc8bfa247c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.157753] env[62820]: DEBUG oslo_vmware.api [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695765, 'name': PowerOnVM_Task, 'duration_secs': 1.145569} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.158438] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1569.158657] env[62820]: INFO nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Took 9.36 seconds to spawn the instance on the hypervisor. [ 1569.158861] env[62820]: DEBUG nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1569.159707] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736060ef-73ce-4ceb-b741-a4a2350a46b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.184523] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 11843b38-3ce4-42a7-b855-a9d0b473e796/11843b38-3ce4-42a7-b855-a9d0b473e796.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1569.186702] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1569.189371] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c57ad38-01f9-4b57-a3c3-7890e09dc7fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.220184] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1569.220184] env[62820]: value = "task-1695768" [ 1569.220184] env[62820]: _type = "Task" [ 1569.220184] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.233161] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.256508] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.775s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.257635] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1569.260716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.605s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.262254] env[62820]: INFO nova.compute.claims [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.275634] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695766, 'name': CreateVM_Task, 'duration_secs': 0.613563} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.276375] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1569.277143] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.277472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.277862] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1569.278154] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea43e6a8-465e-412e-8c4f-c6a115763a57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.284186] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1569.284186] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a5114f-11c8-f82d-5634-dd1fc89a25cc" [ 1569.284186] env[62820]: _type = "Task" [ 1569.284186] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.293826] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a5114f-11c8-f82d-5634-dd1fc89a25cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.334973] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "refresh_cache-b7806d81-eb2d-4724-8c40-ed88c8c77870" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.335819] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired lock "refresh_cache-b7806d81-eb2d-4724-8c40-ed88c8c77870" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.335819] env[62820]: DEBUG nova.network.neutron [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.369022] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.369311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.468504] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b7c52283-eada-47fd-887f-a5ad94a0583a] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1569.617626] env[62820]: INFO nova.compute.manager [-] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Took 1.87 seconds to deallocate network for instance. [ 1569.726782] env[62820]: INFO nova.compute.manager [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Took 52.98 seconds to build instance. [ 1569.745181] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695768, 'name': ReconfigVM_Task, 'duration_secs': 0.301001} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.748275] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.748619] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 11843b38-3ce4-42a7-b855-a9d0b473e796/11843b38-3ce4-42a7-b855-a9d0b473e796.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1569.750326] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2789b29-5b7e-491f-8f03-b96284766972 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.760137] env[62820]: DEBUG nova.network.neutron [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Updated VIF entry in instance network info cache for port d98a1f49-60e3-4537-b8fd-4994472afa94. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1569.760505] env[62820]: DEBUG nova.network.neutron [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Updating instance_info_cache with network_info: [{"id": "d98a1f49-60e3-4537-b8fd-4994472afa94", "address": "fa:16:3e:c8:9d:97", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd98a1f49-60", "ovs_interfaceid": "d98a1f49-60e3-4537-b8fd-4994472afa94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.763995] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1569.763995] env[62820]: value = "task-1695769" [ 1569.763995] env[62820]: _type = "Task" [ 1569.763995] 
env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.771760] env[62820]: DEBUG nova.compute.utils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1569.774142] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1569.774762] env[62820]: DEBUG nova.network.neutron [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1569.784496] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695769, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.799673] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.799936] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Processing image fb30f87e-9d83-41a3-a17f-e897695c418d {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1569.800187] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.800369] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.800516] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1569.800786] env[62820]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-a36df20d-7d3c-4bf6-ac2c-3c4ea013dbd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.811261] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1569.811416] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1569.812161] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22199aca-85b4-4d2f-bbd0-afe7d26cca08 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.819050] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1569.819050] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52db601c-5546-fff6-9aca-52b0ef774cbf" [ 1569.819050] env[62820]: _type = "Task" [ 1569.819050] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.824043] env[62820]: DEBUG nova.policy [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f3b1396bd4e4daeb1df16f05c7d92c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cba3bf0aff2d4aedbaa9fbe886f700d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1569.834764] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52db601c-5546-fff6-9aca-52b0ef774cbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.895972] env[62820]: DEBUG nova.network.neutron [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1569.972612] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7c5d1740-92ba-4d4b-a557-10f8ea58e883] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1570.124184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.220774] env[62820]: DEBUG nova.network.neutron [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Updating instance_info_cache with network_info: [{"id": "f482a578-9311-4e50-b484-2d7ba8486eb3", "address": "fa:16:3e:77:78:62", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf482a578-93", "ovs_interfaceid": "f482a578-9311-4e50-b484-2d7ba8486eb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.234192] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e95df53a-829b-4066-b7ae-2a3d7adaaa99 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.522s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.265169] env[62820]: DEBUG oslo_concurrency.lockutils [req-f6e0f2e6-6218-4acb-9f0c-3d64ac0c9361 req-7c344fac-eea4-44b9-91e2-4617dd30c8c2 service nova] Releasing lock "refresh_cache-93e1a842-d598-4798-88ad-622ae5dbf057" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.289606] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1570.294133] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695769, 'name': Rename_Task, 'duration_secs': 0.320638} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.295244] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1570.295814] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d540532c-91bc-403e-a29f-988928365a1d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.307853] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1570.307853] env[62820]: value = "task-1695770" [ 1570.307853] env[62820]: _type = "Task" [ 1570.307853] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.320709] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695770, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.336244] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Preparing fetch location {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1570.336540] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Fetch image to [datastore1] OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e/OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e.vmdk {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1570.336600] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Downloading stream optimized image fb30f87e-9d83-41a3-a17f-e897695c418d to [datastore1] OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e/OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e.vmdk on the data store datastore1 as vApp {{(pid=62820) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1570.336882] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Downloading image file data fb30f87e-9d83-41a3-a17f-e897695c418d to the ESX as VM named 'OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e' {{(pid=62820) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1570.397239] env[62820]: DEBUG nova.network.neutron [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Successfully created port: b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1570.455286] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1570.455286] env[62820]: value = "resgroup-9" [ 1570.455286] env[62820]: _type = "ResourcePool" [ 1570.455286] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1570.455286] env[62820]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e88b30da-92fb-47ac-9400-d3b1b851ce73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.490617] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 2f917745-28ef-4dfe-8c09-45c15a80145d] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1570.502293] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease: (returnval){ [ 1570.502293] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525bf304-3167-4bab-614a-60164d43a293" [ 1570.502293] env[62820]: _type = "HttpNfcLease" [ 1570.502293] env[62820]: } obtained for vApp import into resource pool (val){ [ 1570.502293] env[62820]: value = "resgroup-9" [ 1570.502293] env[62820]: _type = "ResourcePool" [ 1570.502293] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1570.502293] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the lease: (returnval){ [ 1570.502293] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525bf304-3167-4bab-614a-60164d43a293" [ 1570.502293] env[62820]: _type = "HttpNfcLease" [ 1570.502293] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1570.514673] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1570.514673] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525bf304-3167-4bab-614a-60164d43a293" [ 1570.514673] env[62820]: _type = "HttpNfcLease" [ 1570.514673] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1570.647930] env[62820]: DEBUG nova.compute.manager [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Received event network-changed-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1570.648329] env[62820]: DEBUG nova.compute.manager [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Refreshing instance network info cache due to event network-changed-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1570.648560] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.648721] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.648894] env[62820]: DEBUG nova.network.neutron [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Refreshing network info cache for port 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1570.724105] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Releasing lock "refresh_cache-b7806d81-eb2d-4724-8c40-ed88c8c77870" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.724362] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Instance network_info: |[{"id": "f482a578-9311-4e50-b484-2d7ba8486eb3", "address": "fa:16:3e:77:78:62", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf482a578-93", "ovs_interfaceid": "f482a578-9311-4e50-b484-2d7ba8486eb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1570.727548] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:78:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'f482a578-9311-4e50-b484-2d7ba8486eb3', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1570.735311] env[62820]: DEBUG oslo.service.loopingcall [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.736065] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.736261] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.736474] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.736679] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.736833] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.738584] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1570.739045] env[62820]: INFO nova.compute.manager [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Terminating instance [ 1570.740614] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d 
tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1570.743612] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29429c39-1469-41ff-9461-283879b5ea45 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.786193] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1570.786193] env[62820]: value = "task-1695772" [ 1570.786193] env[62820]: _type = "Task" [ 1570.786193] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.808122] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695772, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.824716] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695770, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.934724] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffe9ee2-75d4-4c98-abb8-afb3352e0a5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.944080] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae08d79-155c-42cd-9a56-d39252d92acc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.979041] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1691ea-8101-4efc-9d11-105b6859cd99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.989336] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62ffce5-c06a-4562-abe0-dbc9d27c8c76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.996320] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 043e14a3-df5a-4098-b147-c6460bb85423] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1571.009113] env[62820]: DEBUG nova.compute.provider_tree [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.021086] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1571.021086] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525bf304-3167-4bab-614a-60164d43a293" [ 1571.021086] env[62820]: _type = "HttpNfcLease" [ 1571.021086] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1571.234117] env[62820]: DEBUG nova.compute.manager [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Received event network-vif-deleted-b5622bc1-fd38-457a-9f31-249b2c1721ce {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1571.234450] env[62820]: DEBUG nova.compute.manager [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Received event network-vif-plugged-f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1571.234740] env[62820]: DEBUG oslo_concurrency.lockutils [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] Acquiring lock "b7806d81-eb2d-4724-8c40-ed88c8c77870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.235023] env[62820]: DEBUG oslo_concurrency.lockutils [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.235308] env[62820]: DEBUG oslo_concurrency.lockutils [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.235477] env[62820]: DEBUG nova.compute.manager [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] No waiting events found dispatching network-vif-plugged-f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1571.235662] env[62820]: WARNING nova.compute.manager [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Received unexpected event network-vif-plugged-f482a578-9311-4e50-b484-2d7ba8486eb3 for instance with vm_state building and task_state spawning. [ 1571.235922] env[62820]: DEBUG nova.compute.manager [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Received event network-changed-f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1571.236181] env[62820]: DEBUG nova.compute.manager [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Refreshing instance network info cache due to event network-changed-f482a578-9311-4e50-b484-2d7ba8486eb3. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1571.236427] env[62820]: DEBUG oslo_concurrency.lockutils [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] Acquiring lock "refresh_cache-b7806d81-eb2d-4724-8c40-ed88c8c77870" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.236605] env[62820]: DEBUG oslo_concurrency.lockutils [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] Acquired lock "refresh_cache-b7806d81-eb2d-4724-8c40-ed88c8c77870" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.236817] env[62820]: DEBUG nova.network.neutron [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Refreshing network info cache for port f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1571.272698] env[62820]: DEBUG nova.compute.manager [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1571.272918] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1571.277240] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c341f64a-c98c-4a62-8cab-0b15249258ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.287807] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.288402] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e365e7c7-a6a2-4a86-a893-8b9bd2646ad0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.295795] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.302175] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695772, 'name': CreateVM_Task, 'duration_secs': 0.510975} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.306275] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1571.306733] env[62820]: DEBUG oslo_vmware.api [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1571.306733] env[62820]: value = "task-1695773" [ 1571.306733] env[62820]: _type = "Task" [ 1571.306733] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.307926] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.308182] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.308529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1571.309250] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99b67480-f6ef-4a97-87d9-7455d6d414c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.316264] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1571.325789] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1571.325789] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ce2743-54b0-6936-4c5e-65e31d58db31" [ 1571.325789] env[62820]: _type = "Task" [ 1571.325789] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.330692] env[62820]: DEBUG oslo_vmware.api [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695773, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.339092] env[62820]: DEBUG oslo_vmware.api [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1695770, 'name': PowerOnVM_Task, 'duration_secs': 0.933071} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.340040] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1571.340291] env[62820]: INFO nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Took 9.00 seconds to spawn the instance on the hypervisor. [ 1571.340489] env[62820]: DEBUG nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1571.343181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0812f8f9-cdd7-4c9f-bc4f-8361eae4398c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.350752] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ce2743-54b0-6936-4c5e-65e31d58db31, 'name': SearchDatastore_Task, 'duration_secs': 0.017286} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.352980] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1571.353244] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1571.353416] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1571.353616] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1571.354055] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1571.354055] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1571.354206] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1571.354359] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1571.354541] env[62820]: DEBUG 
nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1571.354715] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1571.354892] env[62820]: DEBUG nova.virt.hardware [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1571.355706] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.355971] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1571.356296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.356405] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.356606] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.358325] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06d9084-8929-455b-b957-f2c0353922fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.362924] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fa43252-7b72-4843-9620-ede72aa57f30 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.377943] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e084cd08-3609-4143-9ebf-622d53b40fb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.383185] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.383376] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1571.384464] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fdba4f1-2a20-4b4f-9846-54b618168076 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.399100] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1571.399100] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52811b24-059c-7648-309e-a40522544d69" [ 1571.399100] env[62820]: _type = "Task" [ 1571.399100] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.407888] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52811b24-059c-7648-309e-a40522544d69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.459192] env[62820]: DEBUG nova.network.neutron [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updated VIF entry in instance network info cache for port 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1571.459192] env[62820]: DEBUG nova.network.neutron [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.516354] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b3d1f811-1d28-40f7-8bf8-c29eb64896c0] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1571.519152] env[62820]: DEBUG nova.scheduler.client.report [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1571.527306] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1571.527306] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525bf304-3167-4bab-614a-60164d43a293" [ 1571.527306] env[62820]: _type = "HttpNfcLease" [ 1571.527306] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1571.527306] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1571.527306] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525bf304-3167-4bab-614a-60164d43a293" [ 1571.527306] env[62820]: _type = "HttpNfcLease" [ 1571.527306] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1571.528040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316db2ce-21d8-47c4-9e34-7965f13011f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.536957] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52136857-675c-519d-319f-168e9e0742a9/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1571.537171] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52136857-675c-519d-319f-168e9e0742a9/disk-0.vmdk. {{(pid=62820) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1571.610132] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4c88c898-f9bc-4271-af71-c82ad6c4e151 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.819531] env[62820]: DEBUG oslo_vmware.api [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695773, 'name': PowerOffVM_Task, 'duration_secs': 0.358458} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.819531] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1571.819676] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1571.819948] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bd1331d-daa0-4a9f-b9d1-bb3ca256b08d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.881559] env[62820]: INFO nova.compute.manager [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Took 49.06 seconds to build instance. 
[ 1571.919477] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52811b24-059c-7648-309e-a40522544d69, 'name': SearchDatastore_Task, 'duration_secs': 0.018133} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.921511] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1571.921726] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1571.921909] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleting the datastore file [datastore1] a150a0d8-afcc-4a5b-a014-2c25a9bc4f07 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1571.922189] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baa644a8-9976-4488-ad3a-206f48001f1a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.925093] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d490a4c-5b13-4fc7-a20f-5d3476838925 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.935198] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1571.935198] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f9827f-2431-1f5e-7522-79e94a46fc09" [ 1571.935198] env[62820]: _type = "Task" [ 1571.935198] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.937516] env[62820]: DEBUG oslo_vmware.api [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for the task: (returnval){ [ 1571.937516] env[62820]: value = "task-1695775" [ 1571.937516] env[62820]: _type = "Task" [ 1571.937516] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.957705] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f9827f-2431-1f5e-7522-79e94a46fc09, 'name': SearchDatastore_Task, 'duration_secs': 0.01225} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.957964] env[62820]: DEBUG oslo_vmware.api [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695775, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.959768] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.960146] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b7806d81-eb2d-4724-8c40-ed88c8c77870/b7806d81-eb2d-4724-8c40-ed88c8c77870.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1571.960764] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a8edc7a-54ff-4e01-a524-047c56370081 req-3be969c5-ec54-42aa-8e10-dcfb3f53dd64 service nova] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.961215] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c149972d-15d3-42cc-8da6-37f94b0d8a26 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.973643] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1571.973643] env[62820]: value = "task-1695776" [ 1571.973643] env[62820]: _type = "Task" [ 1571.973643] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.989425] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695776, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.023040] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 90ea0c16-739a-4132-ac36-e154a846b9c2] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1572.028847] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.029412] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1572.033416] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.130s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.034984] env[62820]: INFO nova.compute.claims [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.384371] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c9b807b5-9692-49fb-958d-022628ff27a0 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.142s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.453604] env[62820]: DEBUG oslo_vmware.api [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Task: {'id': task-1695775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221191} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.453926] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1572.454144] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1572.454327] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1572.454508] env[62820]: INFO nova.compute.manager [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1572.454763] env[62820]: DEBUG oslo.service.loopingcall [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1572.461631] env[62820]: DEBUG nova.compute.manager [-] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1572.461786] env[62820]: DEBUG nova.network.neutron [-] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1572.478716] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Completed reading data from the image iterator. {{(pid=62820) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1572.479193] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52136857-675c-519d-319f-168e9e0742a9/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1572.480613] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b64b325-03b9-439e-9514-c3a5ceae0e4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.493502] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52136857-675c-519d-319f-168e9e0742a9/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1572.493502] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52136857-675c-519d-319f-168e9e0742a9/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1572.495854] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-edcf1c83-44ac-4d41-93c0-6f7d7ed5ccc3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.498543] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695776, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.534546] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: aacc6f1c-56d6-43b9-9c40-5ea49b40a657] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1572.539814] env[62820]: DEBUG nova.compute.utils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1572.544280] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1572.544280] env[62820]: DEBUG nova.network.neutron [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1572.616259] env[62820]: DEBUG nova.network.neutron [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Updated VIF entry in instance network info cache for port f482a578-9311-4e50-b484-2d7ba8486eb3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1572.616670] env[62820]: DEBUG nova.network.neutron [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Updating instance_info_cache with network_info: [{"id": "f482a578-9311-4e50-b484-2d7ba8486eb3", "address": "fa:16:3e:77:78:62", "network": {"id": "a79c9327-326a-45eb-bf3e-7b36de6fed93", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22945fad30bb46e69a75536b22c2f833", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf482a578-93", "ovs_interfaceid": "f482a578-9311-4e50-b484-2d7ba8486eb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.620294] env[62820]: DEBUG nova.policy [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815f8967d40e4943a66da6866de8b018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14768f5b38ea4f6abf5583ce5e4409f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1572.993581] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695776, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644533} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.993862] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b7806d81-eb2d-4724-8c40-ed88c8c77870/b7806d81-eb2d-4724-8c40-ed88c8c77870.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1572.994225] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1572.994531] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ecd5949-2b85-481c-b688-eaccd425913a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.005909] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1573.005909] env[62820]: value = "task-1695777" [ 1573.005909] env[62820]: _type = "Task" [ 1573.005909] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.020979] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.043127] env[62820]: DEBUG nova.network.neutron [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Successfully updated port: b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1573.044528] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 3c5f66f1-c4e4-4ffd-8979-f7f828dc7111] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1573.056714] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1573.080029] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52136857-675c-519d-319f-168e9e0742a9/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1573.080029] env[62820]: INFO nova.virt.vmwareapi.images [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Downloaded image file data fb30f87e-9d83-41a3-a17f-e897695c418d [ 1573.082882] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edc5129-94b0-478b-95c3-e7a96124cf30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.112468] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31bd13d2-f7d8-4ac9-b361-b26379c4db85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.124389] env[62820]: DEBUG oslo_concurrency.lockutils [req-3d08abde-a99c-44d0-bf65-2876a1fa06a4 req-b2d157e3-bad7-4483-b19a-43a30a6f7c0b service nova] Releasing lock "refresh_cache-b7806d81-eb2d-4724-8c40-ed88c8c77870" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.226445] env[62820]: INFO nova.virt.vmwareapi.images [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] The imported VM was unregistered [ 1573.229414] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Caching image {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1573.230257] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating directory with path [datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.230872] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dfe6424-617e-4fa3-94d3-2643573d95b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.243927] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created directory with path [datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.244156] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e/OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e.vmdk to [datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk. 
{{(pid=62820) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1573.244424] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5268f10c-3602-438f-9534-d4c7ad2cf36c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.257928] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1573.257928] env[62820]: value = "task-1695779" [ 1573.257928] env[62820]: _type = "Task" [ 1573.257928] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.268938] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695779, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.356885] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Received event network-vif-plugged-b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1573.357539] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Acquiring lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.357768] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.357942] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.358182] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] No waiting events found dispatching network-vif-plugged-b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1573.358369] env[62820]: WARNING nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Received unexpected event network-vif-plugged-b8d4a717-efe2-46fe-ab6a-186bf8529c92 for instance with vm_state building and task_state spawning. 
[ 1573.358536] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-changed-e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1573.359724] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing instance network info cache due to event network-changed-e8df5c6d-470d-4740-947e-1652ee33a75f. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1573.359724] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.359724] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.359724] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing network info cache for port e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1573.371031] env[62820]: DEBUG nova.network.neutron [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Successfully created port: 4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1573.520205] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187377} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.520563] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1573.521370] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604fea83-ac8e-4c52-901e-dc8d24a9bc87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.547984] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] b7806d81-eb2d-4724-8c40-ed88c8c77870/b7806d81-eb2d-4724-8c40-ed88c8c77870.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1573.551181] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63e22748-5664-45ec-945d-fbd96f0ac1bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.569688] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.569775] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.569875] env[62820]: DEBUG nova.network.neutron [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1573.575460] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.575667] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances with incomplete migration {{(pid=62820) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11624}} [ 1573.587278] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1573.587278] env[62820]: value = "task-1695780" [ 
1573.587278] env[62820]: _type = "Task" [ 1573.587278] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.593211] env[62820]: DEBUG nova.network.neutron [-] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.599163] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebeb282-4981-4f6d-b29c-fe5ba2a1d0a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.617857] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585d6283-c51a-486b-b604-63dd51bbe9af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.624033] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.662886] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a83294-b5dc-4a53-9534-e1d53a962041 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.673050] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81466da-fa7e-4a3d-88d5-1257e534e825 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.692512] env[62820]: DEBUG nova.compute.provider_tree [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1573.771640] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695779, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.080187] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1574.100775] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.108480] env[62820]: INFO nova.compute.manager [-] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Took 1.65 seconds to deallocate network for instance. [ 1574.126585] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.126835] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.127131] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.127343] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.127498] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.127651] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.127864] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.128039] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.128270] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.128444] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.128621] env[62820]: DEBUG nova.virt.hardware [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.129605] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1faabe-009e-4bfb-9234-6f15eb987404 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.133338] env[62820]: DEBUG nova.network.neutron [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1574.143099] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db8dc59-5811-4642-b2e9-f28482866768 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.221599] env[62820]: ERROR nova.scheduler.client.report [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [req-5e23a31a-7c33-493e-8082-b5fbd90ec1b4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e23a31a-7c33-493e-8082-b5fbd90ec1b4"}]} [ 1574.242372] env[62820]: DEBUG nova.scheduler.client.report [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1574.264796] env[62820]: DEBUG nova.scheduler.client.report [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1574.265112] env[62820]: DEBUG nova.compute.provider_tree [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1574.275920] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695779, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.281450] env[62820]: DEBUG nova.scheduler.client.report [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1574.310952] env[62820]: DEBUG nova.scheduler.client.report [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1574.379624] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updated VIF entry in instance network info cache for port e8df5c6d-470d-4740-947e-1652ee33a75f. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1574.380472] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.546023] env[62820]: DEBUG nova.network.neutron [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [{"id": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "address": "fa:16:3e:6c:c0:5f", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d4a717-ef", "ovs_interfaceid": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.583861] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.603308] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.624864] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.775539] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695779, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.777558] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb76827-4b39-4686-aae7-ce4ee666693f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.787264] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4320fd4c-8a05-422e-92ee-47d588c2a7e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.822973] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecb2ed3-75e3-48d8-a411-6f4690bf8762 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.832872] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a319358f-8b7d-43a2-903c-2e6662669fad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.851967] env[62820]: DEBUG nova.compute.provider_tree [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1574.892156] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.892895] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Received event network-changed-b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1574.892895] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Refreshing instance network info cache due to event network-changed-b8d4a717-efe2-46fe-ab6a-186bf8529c92. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1574.892895] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Acquiring lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.935890] env[62820]: DEBUG nova.compute.manager [req-34ef110b-c6a1-4f25-bbfe-23bee5dda954 req-fb0c1671-76d2-4c89-bd3c-928877a592d3 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Received event network-vif-plugged-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1574.936156] env[62820]: DEBUG oslo_concurrency.lockutils [req-34ef110b-c6a1-4f25-bbfe-23bee5dda954 req-fb0c1671-76d2-4c89-bd3c-928877a592d3 service nova] Acquiring lock "15b6eda1-db87-45d1-a0c6-320386b02e12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.936382] env[62820]: DEBUG oslo_concurrency.lockutils [req-34ef110b-c6a1-4f25-bbfe-23bee5dda954 req-fb0c1671-76d2-4c89-bd3c-928877a592d3 service nova] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.936554] env[62820]: DEBUG oslo_concurrency.lockutils [req-34ef110b-c6a1-4f25-bbfe-23bee5dda954 req-fb0c1671-76d2-4c89-bd3c-928877a592d3 service nova] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.936730] env[62820]: DEBUG nova.compute.manager [req-34ef110b-c6a1-4f25-bbfe-23bee5dda954 req-fb0c1671-76d2-4c89-bd3c-928877a592d3 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] No waiting events found dispatching network-vif-plugged-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1574.936921] env[62820]: WARNING nova.compute.manager [req-34ef110b-c6a1-4f25-bbfe-23bee5dda954 req-fb0c1671-76d2-4c89-bd3c-928877a592d3 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Received unexpected event network-vif-plugged-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 for instance with vm_state building and task_state spawning. 
[ 1575.033646] env[62820]: DEBUG nova.network.neutron [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Successfully updated port: 4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.049221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.049578] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Instance network_info: |[{"id": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "address": "fa:16:3e:6c:c0:5f", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d4a717-ef", "ovs_interfaceid": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1575.049881] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Acquired lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.050079] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Refreshing network info cache for port b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1575.051551] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:c0:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8d4a717-efe2-46fe-ab6a-186bf8529c92', 'vif_model': 
'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1575.065267] env[62820]: DEBUG oslo.service.loopingcall [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.066846] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1575.067147] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6771431e-7634-43f7-8611-36aca68765e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.095920] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1575.095920] env[62820]: value = "task-1695781" [ 1575.095920] env[62820]: _type = "Task" [ 1575.095920] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.109110] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695781, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.112598] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.274262] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695779, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.401295] env[62820]: DEBUG nova.scheduler.client.report [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1575.401295] env[62820]: DEBUG nova.compute.provider_tree [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 96 to 97 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1575.401295] env[62820]: DEBUG nova.compute.provider_tree [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1575.533212] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.533390] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.533550] env[62820]: DEBUG nova.network.neutron [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.604816] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695780, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.617365] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695781, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.773960] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695779, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.485236} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.774279] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e/OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e.vmdk to [datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk. [ 1575.774512] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Cleaning up location [datastore1] OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1575.775279] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_55fa28a8-d2ee-455c-a4d2-86d42cf39a5e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1575.775279] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8463ed49-c126-4084-833c-343799a2896b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.788052] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1575.788052] env[62820]: value = "task-1695782" [ 1575.788052] env[62820]: _type = "Task" [ 1575.788052] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.796917] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695782, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.894490] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updated VIF entry in instance network info cache for port b8d4a717-efe2-46fe-ab6a-186bf8529c92. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1575.894944] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [{"id": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "address": "fa:16:3e:6c:c0:5f", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d4a717-ef", "ovs_interfaceid": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.907747] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.874s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.908330] env[62820]: DEBUG nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1575.911360] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.512s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.912970] env[62820]: INFO nova.compute.claims [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1576.079392] env[62820]: DEBUG nova.network.neutron [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1576.104378] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695780, 'name': ReconfigVM_Task, 'duration_secs': 2.243553} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.104994] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Reconfigured VM instance instance-00000040 to attach disk [datastore1] b7806d81-eb2d-4724-8c40-ed88c8c77870/b7806d81-eb2d-4724-8c40-ed88c8c77870.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1576.105620] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab7ac68e-daa7-41cf-a5c4-c2589dae9ecb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.110054] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695781, 'name': CreateVM_Task, 'duration_secs': 0.929182} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.110499] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1576.111221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.111388] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.111696] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1576.111929] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26b33587-4c9c-461e-9318-940dde802463 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.115679] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1576.115679] env[62820]: value = 
"task-1695783" [ 1576.115679] env[62820]: _type = "Task" [ 1576.115679] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.117091] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1576.117091] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52709cf6-f3bc-1e36-82ce-9c0b112c584a" [ 1576.117091] env[62820]: _type = "Task" [ 1576.117091] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.129723] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695783, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.133351] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52709cf6-f3bc-1e36-82ce-9c0b112c584a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.247384] env[62820]: DEBUG nova.network.neutron [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.300066] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.049183} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.301279] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1576.301279] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.301279] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk to [datastore1] 93e1a842-d598-4798-88ad-622ae5dbf057/93e1a842-d598-4798-88ad-622ae5dbf057.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1576.301279] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40347474-412a-4410-9f3e-4340cfadeace {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.309361] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1576.309361] env[62820]: value = "task-1695784" [ 1576.309361] env[62820]: _type = "Task" [ 1576.309361] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.319889] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695784, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.468648] env[62820]: DEBUG oslo_concurrency.lockutils [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] Releasing lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.468648] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Received event network-vif-deleted-09d8d39a-ead7-4dd9-b773-1927cdffde6f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1576.468648] env[62820]: INFO nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Neutron deleted interface 09d8d39a-ead7-4dd9-b773-1927cdffde6f; detaching it from the instance and deleting it from the info cache [ 1576.468648] env[62820]: DEBUG nova.network.neutron [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.468648] env[62820]: DEBUG nova.compute.utils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1576.468648] env[62820]: DEBUG nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Not allocating networking since 'none' was specified. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1576.633162] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695783, 'name': Rename_Task, 'duration_secs': 0.304625} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.637310] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1576.637661] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52709cf6-f3bc-1e36-82ce-9c0b112c584a, 'name': SearchDatastore_Task, 'duration_secs': 0.034321} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.638366] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-854fd08c-e83d-4d24-918a-829d4ea95bc8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.639965] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.640142] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1576.640386] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.640532] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.641477] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1576.641477] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-613fd88b-ddf7-4dc0-b418-4f0b6e3d4082 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.651105] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1576.651105] env[62820]: value = "task-1695785" [ 1576.651105] env[62820]: _type = "Task" [ 1576.651105] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.662121] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695785, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.669230] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1576.669556] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1576.670345] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c01943-6b6a-47fd-a822-3617372828f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.679774] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1576.679774] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b86fb8-e372-a8f6-c5d7-3dcb45382dac" [ 1576.679774] env[62820]: _type = "Task" [ 1576.679774] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.689622] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b86fb8-e372-a8f6-c5d7-3dcb45382dac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.752377] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.752377] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Instance network_info: |[{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1576.752377] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:5b:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b6e11a8-0891-4efe-bc15-3803f5edc4c0', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1576.760014] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating folder: Project (14768f5b38ea4f6abf5583ce5e4409f4). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1576.760416] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7703970b-0579-44bd-bae0-09af564e147a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.775799] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created folder: Project (14768f5b38ea4f6abf5583ce5e4409f4) in parent group-v353379. [ 1576.775799] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating folder: Instances. Parent ref: group-v353574. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1576.775979] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1afcd58d-6f0e-48ea-8a73-10041c44b08e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.791439] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created folder: Instances in parent group-v353574. [ 1576.791700] env[62820]: DEBUG oslo.service.loopingcall [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1576.791930] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1576.794602] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0811936c-c0fb-4c8d-a7bd-f29c9d410fc9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.815967] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1576.815967] env[62820]: value = "task-1695788" [ 1576.815967] env[62820]: _type = "Task" [ 1576.815967] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.823102] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695784, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.831425] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695788, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.905029] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36bfabd7-ab91-488e-b0fa-78f4f970ccbe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.923323] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aee29b4-9fe6-4400-a968-672426a776b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.943420] env[62820]: DEBUG nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1576.983172] env[62820]: DEBUG nova.compute.manager [req-14dbe065-1663-4b03-8aea-29117267fe3d req-e2b9e04c-45be-4e7d-973c-242d78d766f3 service nova] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Detach interface failed, port_id=09d8d39a-ead7-4dd9-b773-1927cdffde6f, reason: Instance a150a0d8-afcc-4a5b-a014-2c25a9bc4f07 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1576.985834] env[62820]: DEBUG nova.compute.manager [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Received event network-changed-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1576.986769] env[62820]: DEBUG nova.compute.manager [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Refreshing instance network info cache due to event network-changed-4b6e11a8-0891-4efe-bc15-3803f5edc4c0. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1576.986769] env[62820]: DEBUG oslo_concurrency.lockutils [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] Acquiring lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.986769] env[62820]: DEBUG oslo_concurrency.lockutils [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] Acquired lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.986769] env[62820]: DEBUG nova.network.neutron [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Refreshing network info cache for port 4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1577.165503] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695785, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.195928] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b86fb8-e372-a8f6-c5d7-3dcb45382dac, 'name': SearchDatastore_Task, 'duration_secs': 0.086679} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.200148] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90698b5c-9d12-4fd2-8d9d-ffe6c842675a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.210415] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1577.210415] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52380ce2-e9ba-c7ba-ae06-813c6a97f1d9" [ 1577.210415] env[62820]: _type = "Task" [ 1577.210415] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.226035] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52380ce2-e9ba-c7ba-ae06-813c6a97f1d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.327160] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695784, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.330711] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695788, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.356298] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.356541] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.430265] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d6164d-791f-4402-a5aa-38979f35dc95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.439364] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78c0010-27f4-4aa4-90c5-7efe33159588 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.477060] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170fcaa7-5517-4d9e-b87b-3493447a6ca0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.486048] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa92bbf1-8c1f-4540-b782-c94a43ee6236 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.505763] env[62820]: DEBUG nova.compute.provider_tree [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.662867] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695785, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.709731] env[62820]: DEBUG nova.network.neutron [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updated VIF entry in instance network info cache for port 4b6e11a8-0891-4efe-bc15-3803f5edc4c0. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1577.710228] env[62820]: DEBUG nova.network.neutron [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.723563] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52380ce2-e9ba-c7ba-ae06-813c6a97f1d9, 'name': SearchDatastore_Task, 'duration_secs': 0.08537} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.724624] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.725377] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3a325dbf-87fb-4f7e-a665-e5d181333a5c/3a325dbf-87fb-4f7e-a665-e5d181333a5c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1577.725377] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a145209-bcde-4c2d-9ae2-a1c3bb35cc4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.734997] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1577.734997] env[62820]: value = "task-1695789" [ 1577.734997] env[62820]: _type = "Task" [ 1577.734997] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.747519] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.823016] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695784, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.833560] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695788, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.859326] env[62820]: DEBUG nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1577.978364] env[62820]: DEBUG nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1578.013676] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1578.013921] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1578.014151] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1578.014440] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1578.014654] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1578.014886] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1578.015147] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1578.015329] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1578.015501] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 
tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1578.015689] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1578.015955] env[62820]: DEBUG nova.virt.hardware [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1578.016887] env[62820]: DEBUG nova.scheduler.client.report [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1578.024193] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc32aba-2834-40d8-accd-5f2a775bca51 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.035169] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799382e8-2629-4718-a95a-a356a0b6fe6e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.053159] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1578.057721] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Creating folder: Project (9bda1b8d342249c0962c11f6c8aad31a). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1578.059036] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9863aeaf-dbf7-4aaf-bdf8-0bd73b539db7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.075447] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Created folder: Project (9bda1b8d342249c0962c11f6c8aad31a) in parent group-v353379. 
[ 1578.075447] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Creating folder: Instances. Parent ref: group-v353577. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1578.075447] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b3e8986-d7b6-4881-85ad-1acad3095cf1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.087424] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Created folder: Instances in parent group-v353577. [ 1578.087678] env[62820]: DEBUG oslo.service.loopingcall [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.087880] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1578.088128] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c512f29-bd9a-4b6f-a866-b6df697653ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.109803] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1578.109803] env[62820]: value = "task-1695792" [ 1578.109803] env[62820]: _type = "Task" [ 1578.109803] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.119688] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695792, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.163467] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695785, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.216951] env[62820]: DEBUG oslo_concurrency.lockutils [req-62eb4999-c9f5-4e47-9c34-d3651162c008 req-75704481-d89b-438b-b298-ea9e667fcf2b service nova] Releasing lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.246153] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.323582] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695784, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.333499] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695788, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.384303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.529365] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.529927] env[62820]: DEBUG nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1578.532751] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.329s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.534296] env[62820]: INFO nova.compute.claims [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.623486] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695792, 'name': CreateVM_Task, 'duration_secs': 0.495395} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.623486] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1578.623486] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.623486] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.623486] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1578.623696] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0af07cb8-265f-496f-9519-865998f91c8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.630270] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1578.630270] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d507da-6672-36af-cdb2-7d07618cc026" [ 1578.630270] env[62820]: _type = "Task" [ 1578.630270] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.641380] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d507da-6672-36af-cdb2-7d07618cc026, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.663885] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695785, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.745233] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695789, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.824883] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695784, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.411188} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.828192] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fb30f87e-9d83-41a3-a17f-e897695c418d/fb30f87e-9d83-41a3-a17f-e897695c418d.vmdk to [datastore1] 93e1a842-d598-4798-88ad-622ae5dbf057/93e1a842-d598-4798-88ad-622ae5dbf057.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1578.828956] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebada3f-e5f4-44e7-a4d8-a84123fc8778 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.837926] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695788, 'name': CreateVM_Task, 'duration_secs': 1.563194} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.847784] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1578.857985] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 93e1a842-d598-4798-88ad-622ae5dbf057/93e1a842-d598-4798-88ad-622ae5dbf057.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1578.858897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.859145] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d01a8d5-04a6-451d-bca3-4e208354756d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.885042] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1578.885042] env[62820]: value = "task-1695793" [ 1578.885042] env[62820]: _type = "Task" [ 1578.885042] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.896741] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695793, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.039841] env[62820]: DEBUG nova.compute.utils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1579.044252] env[62820]: DEBUG nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Not allocating networking since 'none' was specified. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1579.143291] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d507da-6672-36af-cdb2-7d07618cc026, 'name': SearchDatastore_Task, 'duration_secs': 0.072989} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.143618] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.144196] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1579.144196] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.144421] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.144694] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.145750] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.145750] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1579.145750] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2b6f997-1935-42e1-b370-98c9cb6bf3f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.147832] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25dfa58a-6938-4912-8cde-200c95781ec0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.154837] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1579.154837] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d0826e-84c5-2398-21fd-398da8f71091" [ 1579.154837] env[62820]: _type = "Task" [ 1579.154837] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.163564] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.163772] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1579.164831] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78c52d97-7726-44e2-a1b8-2d18dbc3dd1f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.173581] env[62820]: DEBUG oslo_vmware.api [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695785, 'name': PowerOnVM_Task, 'duration_secs': 2.29701} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.173785] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d0826e-84c5-2398-21fd-398da8f71091, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.174475] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.174693] env[62820]: INFO nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Took 11.65 seconds to spawn the instance on the hypervisor. [ 1579.174872] env[62820]: DEBUG nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1579.175691] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd4e70a-c77e-4398-a532-d3a38a6de5ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.181884] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1579.181884] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52850ee7-ac4b-cc26-60c5-44c2a9795e94" [ 1579.181884] env[62820]: _type = "Task" [ 1579.181884] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.195624] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52850ee7-ac4b-cc26-60c5-44c2a9795e94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.247120] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695789, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.441223} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.247448] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3a325dbf-87fb-4f7e-a665-e5d181333a5c/3a325dbf-87fb-4f7e-a665-e5d181333a5c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1579.247687] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1579.247905] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-000a2dd3-052a-4868-a0eb-b23130ec3b46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.255875] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1579.255875] env[62820]: value = "task-1695794" [ 1579.255875] env[62820]: _type = "Task" [ 1579.255875] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.266765] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.399317] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695793, 'name': ReconfigVM_Task, 'duration_secs': 0.511014} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.399612] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 93e1a842-d598-4798-88ad-622ae5dbf057/93e1a842-d598-4798-88ad-622ae5dbf057.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1579.400253] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5782c28-b073-48a3-a197-413a44fce6a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.407549] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1579.407549] env[62820]: value = "task-1695795" [ 1579.407549] env[62820]: _type = "Task" [ 1579.407549] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.418692] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695795, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.545582] env[62820]: DEBUG nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1579.670770] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d0826e-84c5-2398-21fd-398da8f71091, 'name': SearchDatastore_Task, 'duration_secs': 0.028916} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.673047] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.673296] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1579.673508] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.696918] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52850ee7-ac4b-cc26-60c5-44c2a9795e94, 'name': SearchDatastore_Task, 'duration_secs': 0.041907} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.701415] env[62820]: INFO nova.compute.manager [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Took 43.97 seconds to build instance. [ 1579.702179] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee55a85-a752-4593-8631-4cc55692658a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.707802] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1579.707802] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ef2662-1861-ffea-5b25-97ffc49c8394" [ 1579.707802] env[62820]: _type = "Task" [ 1579.707802] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.718447] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ef2662-1861-ffea-5b25-97ffc49c8394, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.765477] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075969} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.768138] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1579.769250] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea10634-5003-4312-86fe-a8e7b36e9b46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.792696] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 3a325dbf-87fb-4f7e-a665-e5d181333a5c/3a325dbf-87fb-4f7e-a665-e5d181333a5c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1579.795406] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-411957f3-896c-4419-b53e-17a193d3bd01 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.816467] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1579.816467] env[62820]: value = "task-1695796" [ 1579.816467] env[62820]: _type = "Task" [ 1579.816467] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.829013] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695796, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.920310] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695795, 'name': Rename_Task, 'duration_secs': 0.149484} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.920592] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1579.920855] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-051d0a3c-7969-4e23-9c15-6b950c73cea6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.926760] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c1e823-98bd-4df2-b048-23b01c8e4451 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.931179] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1579.931179] env[62820]: value = "task-1695797" [ 1579.931179] env[62820]: _type = "Task" [ 1579.931179] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.937729] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baa9429-6eba-46cd-b3c2-a646314bbb9c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.944674] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695797, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.974392] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba533913-5610-4fa5-923a-3fd20e9ec3e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.983288] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d38a368-e953-43bc-9022-c247ee03f548 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.001875] env[62820]: DEBUG nova.compute.provider_tree [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.205417] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9fc4c4d2-c58f-4df9-97a3-7c7b69b089c5 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.563s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.219863] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ef2662-1861-ffea-5b25-97ffc49c8394, 'name': SearchDatastore_Task, 'duration_secs': 0.050134} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.220175] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.220445] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ae63ae5-0306-4540-be88-6e7d909c38a3/4ae63ae5-0306-4540-be88-6e7d909c38a3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1580.220730] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.220918] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1580.221150] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ff14efe-b4db-4495-8685-b159d37921c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.223500] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18696b42-3561-4093-8434-01242998c354 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.231345] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1580.231345] env[62820]: value = "task-1695798" [ 1580.231345] env[62820]: _type = "Task" [ 1580.231345] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.236892] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1580.237139] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1580.241322] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb15d118-43c1-4f04-970f-615d4703e8f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.243642] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.247472] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1580.247472] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a53585-e8ca-cb6a-da30-f825080e2f8e" [ 1580.247472] env[62820]: _type = "Task" [ 1580.247472] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.256057] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a53585-e8ca-cb6a-da30-f825080e2f8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.329112] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695796, 'name': ReconfigVM_Task, 'duration_secs': 0.373787} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.329112] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 3a325dbf-87fb-4f7e-a665-e5d181333a5c/3a325dbf-87fb-4f7e-a665-e5d181333a5c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1580.329112] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b5586b7-d7eb-4ffe-8262-1c3f2a67d192 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.336807] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1580.336807] env[62820]: value = "task-1695799" [ 1580.336807] env[62820]: _type = "Task" [ 1580.336807] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.346837] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695799, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.443082] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695797, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.505323] env[62820]: DEBUG nova.scheduler.client.report [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1580.560604] env[62820]: DEBUG nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1580.654777] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1580.655053] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1580.655317] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1580.655409] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1580.655540] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1580.655690] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1580.655898] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1580.656061] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1580.656239] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 
tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1580.656395] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1580.656596] env[62820]: DEBUG nova.virt.hardware [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1580.657581] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3488fe16-fde0-4c08-9c97-a5f6bc98c1f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.667977] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640c4343-4c08-4a3e-b5a9-e33ad5447168 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.685771] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.691634] env[62820]: DEBUG oslo.service.loopingcall [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.691941] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1580.692181] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fde5a64-fcf7-4146-9992-49e5a2cfe0b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.713508] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1580.713508] env[62820]: value = "task-1695800" [ 1580.713508] env[62820]: _type = "Task" [ 1580.713508] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.729150] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695800, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.742035] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695798, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.759954] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a53585-e8ca-cb6a-da30-f825080e2f8e, 'name': SearchDatastore_Task, 'duration_secs': 0.030702} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.760870] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-254ddb50-23ea-4e46-9c4c-9fb0b2bd2880 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.771059] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1580.771059] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523cbf8e-ca43-f584-d3cd-1d34476d8e3a" [ 1580.771059] env[62820]: _type = "Task" [ 1580.771059] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.786334] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523cbf8e-ca43-f584-d3cd-1d34476d8e3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.850149] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695799, 'name': Rename_Task, 'duration_secs': 0.28063} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.850449] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1580.850717] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e95f484-ee89-4f05-8686-3c6ed148deab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.862932] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1580.862932] env[62820]: value = "task-1695801" [ 1580.862932] env[62820]: _type = "Task" [ 1580.862932] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.874040] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.944138] env[62820]: DEBUG oslo_vmware.api [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695797, 'name': PowerOnVM_Task, 'duration_secs': 0.517464} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.944467] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1580.944715] env[62820]: INFO nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Took 16.10 seconds to spawn the instance on the hypervisor. [ 1580.944955] env[62820]: DEBUG nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1580.945776] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465d96f4-1790-4848-bbeb-e30b30db4937 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.010047] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.010667] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1581.013600] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.775s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.013854] env[62820]: DEBUG nova.objects.instance [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lazy-loading 'resources' on Instance uuid 871195a8-8b7d-433f-a0b5-c570c65faf1e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1581.134428] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1d62df-df0e-4443-9900-f7aa269c994e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.141693] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Suspending the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1581.141960] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2dded290-a585-4138-b02e-fd59489a1c2e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.150944] env[62820]: DEBUG oslo_vmware.api [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] Waiting for the task: (returnval){ [ 1581.150944] env[62820]: value = "task-1695802" [ 1581.150944] env[62820]: _type = "Task" [ 1581.150944] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.165728] env[62820]: DEBUG oslo_vmware.api [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] Task: {'id': task-1695802, 'name': SuspendVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.224016] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695800, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.242170] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728258} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.242436] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ae63ae5-0306-4540-be88-6e7d909c38a3/4ae63ae5-0306-4540-be88-6e7d909c38a3.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1581.242651] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1581.242914] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6eeb182e-80d8-42b5-85b6-8f7a143605a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.251075] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1581.251075] env[62820]: value = "task-1695803" [ 1581.251075] env[62820]: _type = "Task" [ 1581.251075] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.260859] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695803, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.282523] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523cbf8e-ca43-f584-d3cd-1d34476d8e3a, 'name': SearchDatastore_Task, 'duration_secs': 0.058328} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.285375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.285375] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 15b6eda1-db87-45d1-a0c6-320386b02e12/15b6eda1-db87-45d1-a0c6-320386b02e12.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1581.285375] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c20622bd-bc49-49a5-a3df-5b01fdbb587d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.292670] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1581.292670] env[62820]: value = "task-1695804" [ 1581.292670] env[62820]: _type = "Task" [ 1581.292670] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.302188] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695804, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.373071] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695801, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.464049] env[62820]: INFO nova.compute.manager [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Took 54.93 seconds to build instance. [ 1581.516478] env[62820]: DEBUG nova.compute.utils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.521901] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1581.521901] env[62820]: DEBUG nova.network.neutron [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1581.565386] env[62820]: DEBUG nova.policy [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81a169ac4144a5bbc0a4e3a077cb4a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65abf73e789b48d3ba24e2660d7c0341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1581.662262] env[62820]: DEBUG oslo_vmware.api [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] Task: {'id': task-1695802, 'name': SuspendVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.727125] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695800, 'name': CreateVM_Task, 'duration_secs': 0.594077} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.727354] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.727947] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.728147] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.728466] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.731275] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33a77d21-5d48-4933-b90e-47af963ceda5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.741675] env[62820]: DEBUG oslo_vmware.api 
[None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1581.741675] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d23f7f-497e-c3e9-ca76-0c2b83c317d6" [ 1581.741675] env[62820]: _type = "Task" [ 1581.741675] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.766737] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d23f7f-497e-c3e9-ca76-0c2b83c317d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.767060] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115599} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.767336] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1581.768143] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5325b38-2e68-4007-924e-1507b33f4ca6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.794245] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 4ae63ae5-0306-4540-be88-6e7d909c38a3/4ae63ae5-0306-4540-be88-6e7d909c38a3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1581.797416] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f81e11bc-dff7-4146-b37f-119b83791b70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.830339] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1581.830339] env[62820]: value = "task-1695805" [ 1581.830339] env[62820]: _type = "Task" [ 1581.830339] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.830601] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695804, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.842599] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695805, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.876564] env[62820]: DEBUG oslo_vmware.api [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695801, 'name': PowerOnVM_Task, 'duration_secs': 0.876191} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.879923] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1581.880229] env[62820]: INFO nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Took 10.56 seconds to spawn the instance on the hypervisor. [ 1581.880425] env[62820]: DEBUG nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1581.882308] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0093b1-0dc1-49a0-95af-18871ffacdcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.968226] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d23424c5-b7cb-4c2a-86f8-b27b10c22179 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "93e1a842-d598-4798-88ad-622ae5dbf057" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.437s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.009242] env[62820]: DEBUG nova.network.neutron [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Successfully created port: ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1582.022704] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1582.030562] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba9c881-d699-421d-9605-cd05cf28939f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.041384] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec9e462-e297-49fd-9fbe-7517b7bf7a30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.077156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "93e1a842-d598-4798-88ad-622ae5dbf057" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.077553] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "93e1a842-d598-4798-88ad-622ae5dbf057" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.077656] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "93e1a842-d598-4798-88ad-622ae5dbf057-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.077838] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "93e1a842-d598-4798-88ad-622ae5dbf057-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.078021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "93e1a842-d598-4798-88ad-622ae5dbf057-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.080298] env[62820]: INFO nova.compute.manager [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Terminating instance [ 1582.084268] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1ae6bb-44ab-4511-aa03-d67d8891e870 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.096256] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189a674f-c6db-4947-b866-86bec2f2652c {{(pid=62820) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.111937] env[62820]: DEBUG nova.compute.provider_tree [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.166315] env[62820]: DEBUG oslo_vmware.api [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] Task: {'id': task-1695802, 'name': SuspendVM_Task, 'duration_secs': 0.980105} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.166741] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Suspended the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1582.166975] env[62820]: DEBUG nova.compute.manager [None req-f7b7fc3d-e7ce-4a51-9238-08da63ec42b2 tempest-ServersAdminNegativeTestJSON-1043531650 tempest-ServersAdminNegativeTestJSON-1043531650-project-admin] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1582.167948] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd212e8c-14e4-4e3e-8774-1bc66d46592d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.252858] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d23f7f-497e-c3e9-ca76-0c2b83c317d6, 'name': SearchDatastore_Task, 'duration_secs': 0.027899} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.253189] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.253430] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1582.253659] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.253808] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.253988] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1582.254272] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01196f8f-cd4b-4905-8a9d-9c2f1664df07 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.264392] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1582.264630] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1582.265545] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4e572aa-2f34-4a81-997c-c355b770f1e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.273023] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1582.273023] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52146629-be6f-e4a5-8b3b-7f5bad83a15b" [ 1582.273023] env[62820]: _type = "Task" [ 1582.273023] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.281279] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52146629-be6f-e4a5-8b3b-7f5bad83a15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.312010] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.831227} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.312336] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 15b6eda1-db87-45d1-a0c6-320386b02e12/15b6eda1-db87-45d1-a0c6-320386b02e12.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1582.312500] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1582.312748] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3b78c41-2155-4e59-9d54-f1d85b1b36a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.322808] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1582.322808] env[62820]: value = "task-1695806" [ 1582.322808] env[62820]: _type = "Task" [ 1582.322808] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.337744] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695806, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.343807] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695805, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.404310] env[62820]: INFO nova.compute.manager [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Took 41.47 seconds to build instance. [ 1582.589050] env[62820]: DEBUG nova.compute.manager [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1582.589050] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1582.589971] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189d29aa-2b8a-48dc-84c4-2e471ed6e16d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.598147] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1582.598382] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61513f3d-db94-4097-b61b-27b87d93d975 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.604095] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1582.604095] env[62820]: value = "task-1695807" [ 1582.604095] env[62820]: _type = "Task" [ 1582.604095] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.612144] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695807, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.615415] env[62820]: DEBUG nova.scheduler.client.report [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1582.784072] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52146629-be6f-e4a5-8b3b-7f5bad83a15b, 'name': SearchDatastore_Task, 'duration_secs': 0.0174} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.784891] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4eb6ab4-b3c8-422b-8b85-f944c93cc8b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.791475] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1582.791475] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52654ee2-5d50-8949-bdb3-582725ceb515" [ 1582.791475] env[62820]: _type = "Task" [ 1582.791475] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.799937] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52654ee2-5d50-8949-bdb3-582725ceb515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.833307] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082785} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.837758] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1582.838553] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e118fb-259a-47bd-93d2-a530349bb1ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.846714] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695805, 'name': ReconfigVM_Task, 'duration_secs': 0.523967} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.855541] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 4ae63ae5-0306-4540-be88-6e7d909c38a3/4ae63ae5-0306-4540-be88-6e7d909c38a3.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1582.864316] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 15b6eda1-db87-45d1-a0c6-320386b02e12/15b6eda1-db87-45d1-a0c6-320386b02e12.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1582.864566] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78604c2e-fc79-46e8-a678-02312fe14178 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.866111] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4fb218e-3910-4af8-810b-a4e04e429e65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.888615] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1582.888615] env[62820]: value = "task-1695808" [ 1582.888615] env[62820]: _type = "Task" [ 1582.888615] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.888907] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1582.888907] env[62820]: value = "task-1695809" [ 1582.888907] env[62820]: _type = "Task" [ 1582.888907] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.900732] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695808, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.903819] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ce9802f-05af-4c71-a5b1-775be1a22720 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.979s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.904020] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695809, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.033175] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1583.064161] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1583.064419] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1583.064572] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1583.064828] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1583.064904] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1583.065832] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1583.065832] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1583.065832] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1583.065832] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1583.065832] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1583.066085] env[62820]: DEBUG nova.virt.hardware [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1583.066952] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4ac1b2-f311-4ac0-89ae-10eae7f34ee7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.075631] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4286f024-a215-4ad8-95e5-887390da3220 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.114353] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695807, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.120318] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.122517] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.956s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.122751] env[62820]: DEBUG nova.objects.instance [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lazy-loading 'resources' on Instance uuid 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1583.146437] env[62820]: INFO nova.scheduler.client.report [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Deleted allocations for instance 871195a8-8b7d-433f-a0b5-c570c65faf1e [ 1583.298977] env[62820]: DEBUG nova.compute.manager [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Received event network-changed-b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1583.299233] env[62820]: DEBUG nova.compute.manager [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Refreshing instance network info cache due to event network-changed-b8d4a717-efe2-46fe-ab6a-186bf8529c92. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1583.299457] env[62820]: DEBUG oslo_concurrency.lockutils [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] Acquiring lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.299555] env[62820]: DEBUG oslo_concurrency.lockutils [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] Acquired lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.299723] env[62820]: DEBUG nova.network.neutron [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Refreshing network info cache for port b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1583.307656] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52654ee2-5d50-8949-bdb3-582725ceb515, 'name': SearchDatastore_Task, 'duration_secs': 0.011026} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.308397] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.308653] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.308907] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8ba897a-bcde-410a-8d51-6e467347e5a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.317915] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1583.317915] env[62820]: value = "task-1695810" [ 1583.317915] env[62820]: _type = "Task" [ 1583.317915] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.327630] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695810, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.407028] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695808, 'name': Rename_Task, 'duration_secs': 0.273483} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.409718] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1583.410029] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695809, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.410255] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7eedd8e7-9879-419b-837a-70f24b7196ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.417201] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1583.417201] env[62820]: value = "task-1695811" [ 1583.417201] env[62820]: _type = "Task" [ 1583.417201] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.430237] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695811, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.524829] env[62820]: DEBUG nova.compute.manager [req-ca1e8ac1-f478-4914-84ca-d5509cabf5e6 req-cc3a0c88-9972-4b94-8d23-56bded208224 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Received event network-vif-plugged-ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1583.525134] env[62820]: DEBUG oslo_concurrency.lockutils [req-ca1e8ac1-f478-4914-84ca-d5509cabf5e6 req-cc3a0c88-9972-4b94-8d23-56bded208224 service nova] Acquiring lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.525358] env[62820]: DEBUG oslo_concurrency.lockutils [req-ca1e8ac1-f478-4914-84ca-d5509cabf5e6 req-cc3a0c88-9972-4b94-8d23-56bded208224 service nova] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.525548] env[62820]: DEBUG oslo_concurrency.lockutils [req-ca1e8ac1-f478-4914-84ca-d5509cabf5e6 req-cc3a0c88-9972-4b94-8d23-56bded208224 service nova] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.525705] env[62820]: DEBUG nova.compute.manager [req-ca1e8ac1-f478-4914-84ca-d5509cabf5e6 req-cc3a0c88-9972-4b94-8d23-56bded208224 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] No waiting events found dispatching network-vif-plugged-ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1583.525881] env[62820]: WARNING nova.compute.manager [req-ca1e8ac1-f478-4914-84ca-d5509cabf5e6 req-cc3a0c88-9972-4b94-8d23-56bded208224 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Received unexpected event network-vif-plugged-ff29c319-f707-464e-83f5-4df60b1eb8df for instance with vm_state building and task_state spawning. [ 1583.623274] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695807, 'name': PowerOffVM_Task, 'duration_secs': 0.532334} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.623274] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1583.623274] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1583.623416] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89e371e5-b466-4ad9-b7b1-43959bffdf2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.664108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a1bc79d1-a100-42cf-83a2-5bda3507d52b tempest-FloatingIPsAssociationNegativeTestJSON-1163624200 tempest-FloatingIPsAssociationNegativeTestJSON-1163624200-project-member] Lock "871195a8-8b7d-433f-a0b5-c570c65faf1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.849s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.693489] env[62820]: DEBUG nova.network.neutron [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Successfully updated port: ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.726420] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1583.726775] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1583.726973] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleting the datastore file [datastore1] 93e1a842-d598-4798-88ad-622ae5dbf057 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1583.730492] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ead54f0-2399-4bd9-b8c4-ce85524a43d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.744032] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1583.744032] env[62820]: value = "task-1695813" [ 1583.744032] env[62820]: 
_type = "Task" [ 1583.744032] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.766379] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695813, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.828974] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695810, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.901369] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695809, 'name': ReconfigVM_Task, 'duration_secs': 0.555211} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.904391] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 15b6eda1-db87-45d1-a0c6-320386b02e12/15b6eda1-db87-45d1-a0c6-320386b02e12.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1583.905272] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-557746d2-9b72-49ff-8cb2-bebfb8664d17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.913832] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1583.913832] env[62820]: value = "task-1695814" [ 1583.913832] env[62820]: _type = "Task" [ 1583.913832] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.930672] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695814, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.940060] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695811, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.943901] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.944687] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.945988] env[62820]: INFO nova.compute.manager [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Rebooting instance [ 1584.103558] env[62820]: DEBUG nova.network.neutron [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updated VIF entry in instance network info cache for port b8d4a717-efe2-46fe-ab6a-186bf8529c92. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1584.103558] env[62820]: DEBUG nova.network.neutron [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [{"id": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "address": "fa:16:3e:6c:c0:5f", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d4a717-ef", "ovs_interfaceid": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.121276] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3910d1-9844-4780-9d73-e116e8dd750c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.130834] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-6089f3be-25ec-41b4-85ef-ba5e2252dca0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.166111] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e305d6af-6a41-4bfe-96a8-c556ddba5d9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.174666] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8c2143-6c8d-4a8c-a54d-bf9fea6dfcae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.193342] env[62820]: DEBUG nova.compute.provider_tree [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.201535] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.201936] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.201936] env[62820]: DEBUG nova.network.neutron [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.255969] env[62820]: DEBUG oslo_vmware.api [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695813, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279327} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.256159] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1584.256420] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1584.256691] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1584.257072] env[62820]: INFO nova.compute.manager [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1584.257257] env[62820]: DEBUG oslo.service.loopingcall [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1584.257755] env[62820]: DEBUG nova.compute.manager [-] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1584.257912] env[62820]: DEBUG nova.network.neutron [-] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1584.330760] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558035} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.331913] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1584.332275] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1584.332561] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54829b4b-5560-4fc6-b8ce-4a0119300415 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.341204] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1584.341204] env[62820]: value = "task-1695815" [ 1584.341204] env[62820]: _type = "Task" [ 1584.341204] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.349911] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695815, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.437111] env[62820]: DEBUG oslo_vmware.api [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695811, 'name': PowerOnVM_Task, 'duration_secs': 0.65181} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.439933] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1584.439933] env[62820]: INFO nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Took 6.46 seconds to spawn the instance on the hypervisor. 
[ 1584.439933] env[62820]: DEBUG nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1584.439933] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695814, 'name': Rename_Task, 'duration_secs': 0.229065} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.440641] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b651bf48-c15f-44e6-b40d-dc5b080e4493 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.443814] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1584.444223] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-842c71fb-f26c-4011-9bf5-8caf9196f2ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.456281] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1584.456281] env[62820]: value = "task-1695816" [ 1584.456281] env[62820]: _type = "Task" [ 1584.456281] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.471963] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695816, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.474755] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.606070] env[62820]: DEBUG oslo_concurrency.lockutils [req-0d6baffa-e35b-43a8-8867-b5e3442ee85c req-47103879-a67d-4d37-b040-4e634eb7ced6 service nova] Releasing lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.606518] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquired lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.606714] env[62820]: DEBUG nova.network.neutron [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.698025] env[62820]: DEBUG nova.scheduler.client.report [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1584.731516] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "b7806d81-eb2d-4724-8c40-ed88c8c77870" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.731820] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.732011] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "b7806d81-eb2d-4724-8c40-ed88c8c77870-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.732381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.732381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.734654] env[62820]: INFO nova.compute.manager [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Terminating instance [ 1584.754707] env[62820]: DEBUG nova.network.neutron [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1584.853986] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695815, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185013} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.853986] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1584.854200] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c1ce1e-bf0a-4a7a-94db-5c29ace2215a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.875729] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1584.876986] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2cac40c-f93c-4e43-b975-b4b03bcc1349 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.896857] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1584.896857] env[62820]: value = "task-1695817" [ 1584.896857] env[62820]: _type = "Task" [ 1584.896857] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.905404] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695817, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.969283] env[62820]: DEBUG nova.network.neutron [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updating instance_info_cache with network_info: [{"id": "ff29c319-f707-464e-83f5-4df60b1eb8df", "address": "fa:16:3e:f0:47:4e", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff29c319-f7", "ovs_interfaceid": "ff29c319-f707-464e-83f5-4df60b1eb8df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.971092] env[62820]: INFO nova.compute.manager [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Took 35.09 seconds to build instance. [ 1584.978377] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695816, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.047826] env[62820]: DEBUG nova.network.neutron [-] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.206978] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.209806] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.615s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.209806] env[62820]: DEBUG nova.objects.instance [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lazy-loading 'resources' on Instance uuid 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.241131] env[62820]: INFO nova.scheduler.client.report [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted allocations for instance 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86 [ 1585.242469] env[62820]: DEBUG nova.compute.manager [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1585.242670] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1585.244854] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccac3871-a8d8-43a3-a8e8-1ddce07270b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.257487] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1585.257624] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91cf27a6-de96-42a6-9beb-9e796cc3dab1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.332783] env[62820]: DEBUG nova.network.neutron [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [{"id": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "address": "fa:16:3e:6c:c0:5f", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d4a717-ef", "ovs_interfaceid": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.338406] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1585.339102] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 
b7806d81-eb2d-4724-8c40-ed88c8c77870] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1585.339102] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Deleting the datastore file [datastore1] b7806d81-eb2d-4724-8c40-ed88c8c77870 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1585.339201] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b21b86e-e726-4398-a5e8-b6ccb45c202e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.347455] env[62820]: DEBUG oslo_vmware.api [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1585.347455] env[62820]: value = "task-1695819" [ 1585.347455] env[62820]: _type = "Task" [ 1585.347455] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.356590] env[62820]: DEBUG oslo_vmware.api [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695819, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.409037] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695817, 'name': ReconfigVM_Task, 'duration_secs': 0.293064} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.409777] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1585.410490] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3ba0dc4-3aa8-47d1-bec0-4c3ecc687eee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.417977] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1585.417977] env[62820]: value = "task-1695820" [ 1585.417977] env[62820]: _type = "Task" [ 1585.417977] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.434658] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695820, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.472246] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.472670] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Instance network_info: |[{"id": "ff29c319-f707-464e-83f5-4df60b1eb8df", "address": "fa:16:3e:f0:47:4e", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff29c319-f7", "ovs_interfaceid": "ff29c319-f707-464e-83f5-4df60b1eb8df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1585.473016] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695816, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.473457] env[62820]: DEBUG oslo_concurrency.lockutils [None req-272afe4e-379e-4862-b208-ce31547e6cd0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "4ae63ae5-0306-4540-be88-6e7d909c38a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.600s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.473826] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:47:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff29c319-f707-464e-83f5-4df60b1eb8df', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.483129] env[62820]: DEBUG oslo.service.loopingcall [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.483781] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1585.484073] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6345eb6b-fc38-4744-9e20-116e70ac29b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.505655] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.505655] env[62820]: value = "task-1695821" [ 1585.505655] env[62820]: _type = "Task" [ 1585.505655] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.514161] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695821, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.550897] env[62820]: INFO nova.compute.manager [-] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Took 1.29 seconds to deallocate network for instance. [ 1585.641701] env[62820]: DEBUG nova.compute.manager [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Received event network-changed-ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1585.641916] env[62820]: DEBUG nova.compute.manager [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Refreshing instance network info cache due to event network-changed-ff29c319-f707-464e-83f5-4df60b1eb8df. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1585.642286] env[62820]: DEBUG oslo_concurrency.lockutils [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] Acquiring lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.642449] env[62820]: DEBUG oslo_concurrency.lockutils [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] Acquired lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.642610] env[62820]: DEBUG nova.network.neutron [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Refreshing network info cache for port ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1585.757839] env[62820]: DEBUG oslo_concurrency.lockutils [None req-39231b3c-66c1-41ee-af49-a1c4b8f0709d tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "0ed6ab62-6ae1-4b1a-be2e-a2312334fd86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.220s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.837440] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Releasing lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.858192] env[62820]: DEBUG oslo_vmware.api [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695819, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161693} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.861451] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1585.861571] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1585.861952] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1585.862303] env[62820]: INFO nova.compute.manager [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1585.862362] env[62820]: DEBUG oslo.service.loopingcall [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.863012] env[62820]: DEBUG nova.compute.manager [-] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1585.863132] env[62820]: DEBUG nova.network.neutron [-] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1585.935209] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695820, 'name': Rename_Task, 'duration_secs': 0.151602} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.935517] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1585.936171] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec93d7f3-d658-4e9d-8030-f7c451971af6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.947655] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1585.947655] env[62820]: value = "task-1695822" [ 1585.947655] env[62820]: _type = "Task" [ 1585.947655] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.959147] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695822, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.976966] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695816, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.018692] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695821, 'name': CreateVM_Task, 'duration_secs': 0.434031} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.021176] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.021923] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.022124] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.022508] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.022643] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaa31c26-a0f7-44c5-bb99-7e25be30b5ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.028113] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1586.028113] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bf7c5f-5e86-f66d-b7cd-e517d425eba1" [ 1586.028113] env[62820]: _type = "Task" [ 1586.028113] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.038854] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bf7c5f-5e86-f66d-b7cd-e517d425eba1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.058357] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.120940] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1014fb0e-89ce-47f8-907c-6f489479b7f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.129522] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a20bf1-5f53-414c-9c8f-cf5a516ce2ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.164682] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1c1c8f-f349-4147-8799-bd36c6e81897 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.174245] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbac4bd-6d28-40cf-a621-fcfe82d67d66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.189970] env[62820]: DEBUG nova.compute.provider_tree [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.342117] env[62820]: DEBUG nova.compute.manager [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1586.343317] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63171e5-71ee-4179-afce-4c02543db3a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.460217] env[62820]: DEBUG oslo_vmware.api [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695822, 'name': PowerOnVM_Task, 'duration_secs': 0.461351} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.461919] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1586.462798] env[62820]: INFO nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Took 5.90 seconds to spawn the instance on the hypervisor. [ 1586.463013] env[62820]: DEBUG nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1586.464469] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a61179-6bd5-4bda-a111-faf60b351736 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.478255] env[62820]: DEBUG oslo_vmware.api [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1695816, 'name': PowerOnVM_Task, 'duration_secs': 1.638223} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.480015] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1586.480553] env[62820]: INFO nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Took 12.40 seconds to spawn the instance on the hypervisor. [ 1586.480743] env[62820]: DEBUG nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1586.485786] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cb0fde-1c78-4c80-8d63-f0690187425a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.504756] env[62820]: DEBUG nova.network.neutron [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updated VIF entry in instance network info cache for port ff29c319-f707-464e-83f5-4df60b1eb8df. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1586.504908] env[62820]: DEBUG nova.network.neutron [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updating instance_info_cache with network_info: [{"id": "ff29c319-f707-464e-83f5-4df60b1eb8df", "address": "fa:16:3e:f0:47:4e", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff29c319-f7", "ovs_interfaceid": "ff29c319-f707-464e-83f5-4df60b1eb8df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.540030] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bf7c5f-5e86-f66d-b7cd-e517d425eba1, 'name': SearchDatastore_Task, 'duration_secs': 0.013408} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.540030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.540030] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1586.540306] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.540424] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.540598] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.540854] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7014c6f6-a836-4021-b16f-65000146d151 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.550008] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1586.550500] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1586.551229] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5035043e-096e-4a1a-bdcb-01ac6fb9045f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.557571] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1586.557571] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5202310c-cb85-b5fe-3379-5bcba07eed93" [ 1586.557571] env[62820]: _type = "Task" [ 1586.557571] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.566623] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5202310c-cb85-b5fe-3379-5bcba07eed93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.693223] env[62820]: DEBUG nova.scheduler.client.report [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1586.697397] env[62820]: DEBUG nova.network.neutron [-] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.727364] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.727691] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.727944] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.728221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.728436] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.730755] env[62820]: INFO nova.compute.manager [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Terminating instance [ 1586.994529] env[62820]: INFO nova.compute.manager [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Took 36.61 seconds to build instance. [ 1587.002487] env[62820]: INFO nova.compute.manager [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Took 43.37 seconds to build instance. [ 1587.008134] env[62820]: DEBUG oslo_concurrency.lockutils [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] Releasing lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.008423] env[62820]: DEBUG nova.compute.manager [req-07bba73a-ae80-402c-b461-a7c94dd5929e req-51d1e393-75fe-4606-8600-40aa49ef9c18 service nova] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Received event network-vif-deleted-d98a1f49-60e3-4537-b8fd-4994472afa94 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1587.068383] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5202310c-cb85-b5fe-3379-5bcba07eed93, 'name': SearchDatastore_Task, 'duration_secs': 0.012809} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.069206] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c750e096-052c-44b5-a915-32fdc150fb5c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.076923] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1587.076923] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52628cc0-04cf-12c4-e656-0475f62f9222" [ 1587.076923] env[62820]: _type = "Task" [ 1587.076923] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.082431] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52628cc0-04cf-12c4-e656-0475f62f9222, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.199292] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.990s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.202617] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.453s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.204137] env[62820]: INFO nova.compute.claims [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1587.214024] env[62820]: INFO nova.compute.manager [-] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Took 1.35 seconds to deallocate network for instance. [ 1587.233453] env[62820]: INFO nova.scheduler.client.report [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Deleted allocations for instance 4fa6e38f-dcca-4f65-86d6-1c585deb1c13 [ 1587.236052] env[62820]: DEBUG nova.compute.manager [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1587.236052] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1587.242387] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeb747d-ddfd-4959-a969-d8f8090428b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.250498] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1587.250857] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b84211e-3818-4238-b6a2-093bd66bc6e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.258731] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1587.258731] env[62820]: value = "task-1695823" [ 1587.258731] env[62820]: _type = "Task" [ 1587.258731] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.268175] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.366080] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68650e82-f6bf-4dd3-9111-743acce1eef9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.375878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Doing hard reboot of VM {{(pid=62820) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1587.376228] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-af644042-5ea7-4605-ade6-eee3fe83f49a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.383102] env[62820]: DEBUG oslo_vmware.api [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1587.383102] env[62820]: value = "task-1695824" [ 1587.383102] env[62820]: _type = "Task" [ 1587.383102] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.391733] env[62820]: DEBUG oslo_vmware.api [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695824, 'name': ResetVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.497399] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d39d797e-994c-4602-82d7-48a08babf251 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "2587a273-0115-483a-ba5e-994c87bbc4d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.127s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.505981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc7fc236-9a91-4287-8f1c-ea94652bcb55 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.877s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.590179] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52628cc0-04cf-12c4-e656-0475f62f9222, 'name': SearchDatastore_Task, 'duration_secs': 0.055114} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.590530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.591947] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ac8c3b8-e5e5-4a74-a430-a88e856b705e/4ac8c3b8-e5e5-4a74-a430-a88e856b705e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1587.591947] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46e3a059-dfdb-47cc-be46-527c7774949e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.598849] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1587.598849] env[62820]: value = "task-1695825" [ 1587.598849] env[62820]: _type = "Task" [ 1587.598849] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.607600] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695825, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.718459] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.749499] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d089a779-df30-47ff-bda7-5cda49c59d5d tempest-MigrationsAdminTest-2107799577 tempest-MigrationsAdminTest-2107799577-project-member] Lock "4fa6e38f-dcca-4f65-86d6-1c585deb1c13" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.050s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.766248] env[62820]: DEBUG nova.compute.manager [req-0e2af12a-d16b-40f6-9b99-2f0055069e7d req-7cfa1b12-2038-4cfe-af4d-d373673d7dc9 service nova] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Received event network-vif-deleted-f482a578-9311-4e50-b484-2d7ba8486eb3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1587.781643] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695823, 'name': PowerOffVM_Task, 'duration_secs': 0.318766} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.781643] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1587.781643] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1587.781643] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-660a179b-2ed6-48aa-89b0-456156894e89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.857408] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1587.857739] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1587.858070] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleting the datastore file [datastore1] 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1587.858465] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3434aa0-d306-43c8-99b4-dc59a00baab6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.865691] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for the task: (returnval){ [ 1587.865691] env[62820]: value = "task-1695827" [ 1587.865691] env[62820]: _type = "Task" [ 1587.865691] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.876293] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695827, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.896204] env[62820]: DEBUG oslo_vmware.api [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695824, 'name': ResetVM_Task, 'duration_secs': 0.096887} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.896859] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Did hard reboot of VM {{(pid=62820) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1587.897245] env[62820]: DEBUG nova.compute.manager [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1587.898579] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c045383-5a69-4d12-a107-e9ba42b212b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.112295] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695825, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.270430] env[62820]: INFO nova.compute.manager [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Rebuilding instance [ 1588.331608] env[62820]: DEBUG nova.compute.manager [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1588.335025] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e9d7c8-4e0e-4ab7-b41f-d8a3c455b7a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.378587] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695827, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.414777] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f2a1a55-c3b1-4518-a067-c3e8369004b3 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.470s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.610248] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658417} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.610688] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ac8c3b8-e5e5-4a74-a430-a88e856b705e/4ac8c3b8-e5e5-4a74-a430-a88e856b705e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1588.611085] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1588.611656] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17df53de-ae3b-4e3e-b506-378a66d5d867 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.622338] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1588.622338] env[62820]: value = "task-1695828" [ 1588.622338] env[62820]: _type = "Task" [ 1588.622338] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.628359] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d67b67-5b9a-4a4a-bcc8-f067a7ecd836 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.635984] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695828, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.642789] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b164c8e-a1d9-4de5-8540-871978816c88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.678803] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a99bd20-af9a-4ab8-bf99-bd21dc760e0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.686872] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b122cc7d-f538-4ad3-ad1a-6e250b9ad135 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.702161] env[62820]: DEBUG nova.compute.provider_tree [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1588.877199] env[62820]: DEBUG oslo_vmware.api [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Task: {'id': task-1695827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.779909} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.877663] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1588.878482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1588.878482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1588.878482] env[62820]: INFO nova.compute.manager [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1588.878645] env[62820]: DEBUG oslo.service.loopingcall [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.878861] env[62820]: DEBUG nova.compute.manager [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1588.878990] env[62820]: DEBUG nova.network.neutron [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1589.137386] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695828, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119094} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.137778] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1589.138709] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22bbda71-f06c-446d-8e98-b9ffe43d6cd1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.170248] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 4ac8c3b8-e5e5-4a74-a430-a88e856b705e/4ac8c3b8-e5e5-4a74-a430-a88e856b705e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.170668] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fc7e398-3282-4ae8-83b2-868db387af77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.196535] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1589.196535] env[62820]: value = "task-1695829" [ 1589.196535] env[62820]: _type = "Task" [ 1589.196535] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.207240] env[62820]: DEBUG nova.scheduler.client.report [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1589.214657] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695829, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.335812] env[62820]: DEBUG nova.compute.manager [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Received event network-changed-b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1589.336055] env[62820]: DEBUG nova.compute.manager [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Refreshing instance network info cache due to event network-changed-b8d4a717-efe2-46fe-ab6a-186bf8529c92. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1589.336298] env[62820]: DEBUG oslo_concurrency.lockutils [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] Acquiring lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.336459] env[62820]: DEBUG oslo_concurrency.lockutils [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] Acquired lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.336640] env[62820]: DEBUG nova.network.neutron [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Refreshing network info cache for port b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.350486] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1589.351090] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abf5b821-17b3-41cf-bd1a-faf9abe2a411 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.360892] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1589.360892] env[62820]: value = "task-1695830" [ 1589.360892] env[62820]: _type = "Task" [ 1589.360892] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.367752] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.705700] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695829, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.717855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.718436] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1589.721363] env[62820]: DEBUG nova.network.neutron [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.721928] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.598s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.722584] env[62820]: DEBUG nova.objects.instance [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lazy-loading 'resources' on Instance uuid a06d736c-a704-46e8-a6f7-85d8be40804f {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1589.792034] env[62820]: DEBUG nova.compute.manager [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Received event network-changed-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1589.792263] env[62820]: DEBUG nova.compute.manager [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Refreshing instance network info cache due to event network-changed-4b6e11a8-0891-4efe-bc15-3803f5edc4c0. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1589.792489] env[62820]: DEBUG oslo_concurrency.lockutils [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] Acquiring lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.792633] env[62820]: DEBUG oslo_concurrency.lockutils [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] Acquired lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.792795] env[62820]: DEBUG nova.network.neutron [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Refreshing network info cache for port 4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.842737] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.842981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.872225] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695830, 'name': PowerOffVM_Task, 'duration_secs': 0.250626} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.872535] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1589.872881] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1589.873640] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea139001-4325-4ad8-b69d-b4d5417e292c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.883706] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1589.883788] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afa01036-93f3-4346-8044-98810e80dbbd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.912380] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1589.912380] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1589.912380] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleting the datastore file [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1589.912380] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35f2b9c4-f248-43dd-93ad-9d76fb5ee669 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.919178] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1589.919178] env[62820]: value = "task-1695832" [ 1589.919178] env[62820]: _type = "Task" [ 1589.919178] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.932062] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.019473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.020125] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.020125] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.020263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.020338] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.023872] env[62820]: INFO nova.compute.manager [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Terminating instance [ 1590.084356] env[62820]: DEBUG nova.network.neutron [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updated VIF entry in instance network info cache for port b8d4a717-efe2-46fe-ab6a-186bf8529c92. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1590.084743] env[62820]: DEBUG nova.network.neutron [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [{"id": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "address": "fa:16:3e:6c:c0:5f", "network": {"id": "dfc2788e-1d08-4284-b4c9-6bf3fc121eef", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-458067686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cba3bf0aff2d4aedbaa9fbe886f700d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d4a717-ef", "ovs_interfaceid": "b8d4a717-efe2-46fe-ab6a-186bf8529c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.209029] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695829, 'name': ReconfigVM_Task, 'duration_secs': 0.642715} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.209319] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 4ac8c3b8-e5e5-4a74-a430-a88e856b705e/4ac8c3b8-e5e5-4a74-a430-a88e856b705e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1590.209940] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37370068-78b0-49c6-a63a-b1e4020d6be5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.216911] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1590.216911] env[62820]: value = "task-1695833" [ 1590.216911] env[62820]: _type = "Task" [ 1590.216911] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.224057] env[62820]: INFO nova.compute.manager [-] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Took 1.34 seconds to deallocate network for instance. 
[ 1590.225491] env[62820]: DEBUG nova.compute.utils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1590.233776] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1590.234031] env[62820]: DEBUG nova.network.neutron [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1590.241194] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695833, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.241885] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1590.296821] env[62820]: DEBUG nova.policy [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc838df5682041ed97e19ce34d9f14ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a07ed2a19149b3a58ee43a07e13bba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1590.346251] env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1590.433034] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10059} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.434048] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1590.434258] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1590.434473] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1590.530212] env[62820]: DEBUG nova.compute.manager [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1590.530455] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1590.531347] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053af02f-44cb-48c7-951e-60d105c52560 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.539204] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1590.539424] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b63e288f-916c-4e04-b805-1e874be71a61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.546835] env[62820]: DEBUG oslo_vmware.api [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1590.546835] env[62820]: value = "task-1695834" [ 1590.546835] env[62820]: _type = "Task" [ 1590.546835] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.556372] env[62820]: DEBUG oslo_vmware.api [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695834, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.588095] env[62820]: DEBUG oslo_concurrency.lockutils [req-ec7d4871-78e8-4ca9-9324-433b8e4cf58e req-c36690d6-c928-48d3-a25b-58f81b280369 service nova] Releasing lock "refresh_cache-3a325dbf-87fb-4f7e-a665-e5d181333a5c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.596505] env[62820]: DEBUG nova.network.neutron [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updated VIF entry in instance network info cache for port 4b6e11a8-0891-4efe-bc15-3803f5edc4c0. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1590.596875] env[62820]: DEBUG nova.network.neutron [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.648132] env[62820]: DEBUG nova.network.neutron [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Successfully created port: 89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1590.668029] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93e7d0d-d212-4f53-a89f-741406ff4f65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.679978] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da28f8e9-403f-46cb-b137-b0ef26c1cae1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.710751] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f9d310-55f7-4375-b669-989aa3bafbfc {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.722230] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7499c1-bd9a-4980-ad27-b8b77487efd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.736091] env[62820]: DEBUG nova.compute.provider_tree [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.740564] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695833, 'name': Rename_Task, 'duration_secs': 0.404231} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.741078] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1590.741423] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac1470aa-bd3f-4b1c-8fe5-6879a5f582a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.747487] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1590.747487] env[62820]: value = "task-1695835" [ 1590.747487] env[62820]: _type = "Task" [ 1590.747487] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.757027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.759520] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695835, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.869481] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.057213] env[62820]: DEBUG oslo_vmware.api [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695834, 'name': PowerOffVM_Task, 'duration_secs': 0.176661} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.057491] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1591.057665] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1591.058272] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e5de23c-3472-4c6f-b39b-31f1b75ff581 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.099249] env[62820]: DEBUG oslo_concurrency.lockutils [req-9a895dba-809d-4559-953b-3e3e332df88d req-edea8604-077a-41db-b447-2fb471e11872 service nova] Releasing lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.129467] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1591.129707] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1591.129899] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Deleting the datastore file [datastore1] 3a325dbf-87fb-4f7e-a665-e5d181333a5c {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1591.130183] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b64d1554-8c28-4646-8206-56b7a137fab2 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.136708] env[62820]: DEBUG oslo_vmware.api [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1591.136708] env[62820]: value = "task-1695837" [ 1591.136708] env[62820]: _type = "Task" [ 1591.136708] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.146322] env[62820]: DEBUG oslo_vmware.api [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.242429] env[62820]: DEBUG nova.scheduler.client.report [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1591.257892] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1591.259928] env[62820]: DEBUG oslo_vmware.api [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695835, 'name': PowerOnVM_Task, 'duration_secs': 0.450969} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.261170] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1591.261305] env[62820]: INFO nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Took 8.23 seconds to spawn the instance on the hypervisor. 
[ 1591.261476] env[62820]: DEBUG nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1591.262300] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3ebe02-84a3-4da8-9e8d-25b3f04f4aee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.285150] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1591.285676] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1591.285676] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1591.285783] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1591.285893] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1591.286055] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1591.286360] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1591.286424] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1591.286599] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1591.286801] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1591.287012] env[62820]: DEBUG nova.virt.hardware [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1591.287798] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f369fa35-5744-4fee-83e1-53ecf8eaabc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.295994] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15045ad1-de43-4057-9ac0-7bcf857b027a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.362363] env[62820]: DEBUG nova.compute.manager [req-e95f30a7-c496-4589-b3e3-3463ab280285 req-7e98f7f1-e3d7-4ade-9d13-45578908cf15 service nova] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Received event network-vif-deleted-7f1b810c-dc19-4971-a532-bdac241941cf {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1591.464890] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1591.465152] 
env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1591.465310] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1591.465501] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1591.465647] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1591.465794] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1591.466026] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1591.466179] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1591.466349] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1591.466510] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1591.466684] env[62820]: DEBUG nova.virt.hardware [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1591.467608] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e217ace-c766-441a-bfec-43540d201990 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.478495] 
env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfea8a6-c4f3-4ce0-807e-3626f3df55c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.491627] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1591.497120] env[62820]: DEBUG oslo.service.loopingcall [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.497633] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1591.497850] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d8ba017-8903-48f4-ba7b-0ae30af25c68 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.513963] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1591.513963] env[62820]: value = "task-1695838" [ 1591.513963] env[62820]: _type = "Task" [ 1591.513963] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.521467] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695838, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.648060] env[62820]: DEBUG oslo_vmware.api [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209195} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.648060] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1591.648060] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1591.648060] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1591.648060] env[62820]: INFO nova.compute.manager [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1591.648060] env[62820]: DEBUG oslo.service.loopingcall [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.648424] env[62820]: DEBUG nova.compute.manager [-] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1591.648424] env[62820]: DEBUG nova.network.neutron [-] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1591.748856] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.027s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.751454] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.456s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.753091] env[62820]: INFO nova.compute.claims [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1591.770439] env[62820]: INFO nova.scheduler.client.report [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Deleted allocations for instance a06d736c-a704-46e8-a6f7-85d8be40804f [ 1591.788699] env[62820]: INFO nova.compute.manager [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Took 38.60 seconds to build instance. [ 1591.971032] env[62820]: DEBUG nova.compute.manager [req-925e2eaf-1f04-4935-ac7b-402e91e5a9a1 req-35e0b712-d58e-4b0e-bdf9-5cc184bddcff service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Received event network-vif-deleted-b8d4a717-efe2-46fe-ab6a-186bf8529c92 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1591.971605] env[62820]: INFO nova.compute.manager [req-925e2eaf-1f04-4935-ac7b-402e91e5a9a1 req-35e0b712-d58e-4b0e-bdf9-5cc184bddcff service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Neutron deleted interface b8d4a717-efe2-46fe-ab6a-186bf8529c92; detaching it from the instance and deleting it from the info cache [ 1591.971605] env[62820]: DEBUG nova.network.neutron [req-925e2eaf-1f04-4935-ac7b-402e91e5a9a1 req-35e0b712-d58e-4b0e-bdf9-5cc184bddcff service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.024165] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695838, 'name': CreateVM_Task, 'duration_secs': 0.308657} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.024413] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1592.024757] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.024919] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.025632] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1592.025632] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8b3e4ca-dc7b-4d58-80bc-69d27f4c16c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.029975] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1592.029975] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d43d17-f0b9-2caf-2423-3b3c5719d4c9" [ 1592.029975] env[62820]: _type = "Task" [ 1592.029975] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.038045] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d43d17-f0b9-2caf-2423-3b3c5719d4c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.231464] env[62820]: DEBUG nova.network.neutron [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Successfully updated port: 89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1592.283086] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4cea25c4-ac64-4ac5-bff4-b27934074785 tempest-VolumesAdminNegativeTest-908146155 tempest-VolumesAdminNegativeTest-908146155-project-member] Lock "a06d736c-a704-46e8-a6f7-85d8be40804f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.344s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.290931] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e6f94c2a-0623-46cd-b028-70f81cfc4815 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.278s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.437482] env[62820]: DEBUG nova.network.neutron [-] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.474680] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43d48fcb-2851-4964-9cab-84e2370dcd8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.487432] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbfec76-9900-42ec-a86e-c9aecdfa2b5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.523413] env[62820]: DEBUG nova.compute.manager [req-925e2eaf-1f04-4935-ac7b-402e91e5a9a1 req-35e0b712-d58e-4b0e-bdf9-5cc184bddcff service nova] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Detach interface failed, port_id=b8d4a717-efe2-46fe-ab6a-186bf8529c92, reason: Instance 3a325dbf-87fb-4f7e-a665-e5d181333a5c could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1592.539206] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d43d17-f0b9-2caf-2423-3b3c5719d4c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009792} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.539531] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.539757] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1592.539991] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.540165] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.540382] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1592.540669] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae080d97-61e3-4cee-8e5e-c5d03f8fcb33 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.549884] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1592.549884] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1592.550987] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9de27a8-7b3d-47e9-9f50-e19ab6625e62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.557314] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1592.557314] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fdc19d-2426-0a44-cbb1-c9a3b19b8a3c" [ 1592.557314] env[62820]: _type = "Task" [ 1592.557314] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.564737] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fdc19d-2426-0a44-cbb1-c9a3b19b8a3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.734332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-ba5b0055-b756-4f80-ba6b-7e8b705d2970" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.734493] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-ba5b0055-b756-4f80-ba6b-7e8b705d2970" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.734649] env[62820]: DEBUG nova.network.neutron [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1592.879582] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.879849] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.879990] env[62820]: INFO nova.compute.manager [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Shelving [ 1592.939802] env[62820]: INFO nova.compute.manager [-] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Took 1.29 seconds to deallocate network for instance. [ 1593.067509] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fdc19d-2426-0a44-cbb1-c9a3b19b8a3c, 'name': SearchDatastore_Task, 'duration_secs': 0.008768} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.068325] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eb93210-2017-4267-880d-cc93db6e70f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.073297] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1593.073297] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5231a14a-2308-0b1e-1006-29ba56285147" [ 1593.073297] env[62820]: _type = "Task" [ 1593.073297] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.079372] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b543743-57fb-4f9b-ba8e-8197c488abb6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.084453] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5231a14a-2308-0b1e-1006-29ba56285147, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.088825] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f0b27a-8163-4610-9645-abbfff5b357a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.125201] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e355b2-3ba4-4649-bcb5-1df475c517ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.133257] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0731f6-9706-4429-8639-d8415872bb20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.149919] env[62820]: DEBUG nova.compute.provider_tree [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1593.267845] env[62820]: DEBUG nova.network.neutron [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1593.386851] env[62820]: DEBUG nova.compute.manager [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Received event network-vif-plugged-89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1593.387086] env[62820]: DEBUG oslo_concurrency.lockutils [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] Acquiring lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.387398] env[62820]: DEBUG oslo_concurrency.lockutils [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.387549] env[62820]: DEBUG oslo_concurrency.lockutils [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.387693] env[62820]: DEBUG nova.compute.manager [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] No waiting events found dispatching 
network-vif-plugged-89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1593.387817] env[62820]: WARNING nova.compute.manager [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Received unexpected event network-vif-plugged-89fa6298-ff56-4900-8160-84554ea1e23c for instance with vm_state building and task_state spawning. [ 1593.388020] env[62820]: DEBUG nova.compute.manager [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Received event network-changed-89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1593.388134] env[62820]: DEBUG nova.compute.manager [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Refreshing instance network info cache due to event network-changed-89fa6298-ff56-4900-8160-84554ea1e23c. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1593.388323] env[62820]: DEBUG oslo_concurrency.lockutils [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] Acquiring lock "refresh_cache-ba5b0055-b756-4f80-ba6b-7e8b705d2970" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.403494] env[62820]: DEBUG nova.network.neutron [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Updating instance_info_cache with network_info: [{"id": "89fa6298-ff56-4900-8160-84554ea1e23c", "address": "fa:16:3e:c9:de:af", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89fa6298-ff", "ovs_interfaceid": "89fa6298-ff56-4900-8160-84554ea1e23c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.446554] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.583901] 
env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5231a14a-2308-0b1e-1006-29ba56285147, 'name': SearchDatastore_Task, 'duration_secs': 0.010294} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.584173] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.584433] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1593.584694] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98039413-3be3-4329-9ddc-2923eb4fcb0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.591749] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1593.591749] env[62820]: value = "task-1695839" [ 1593.591749] env[62820]: _type = "Task" [ 1593.591749] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.599545] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695839, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.653747] env[62820]: DEBUG nova.scheduler.client.report [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1593.892654] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1593.892907] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cea2219-3b8c-496f-9f18-f1b6a1528f6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.902831] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1593.902831] env[62820]: value = "task-1695840" [ 1593.902831] env[62820]: _type = "Task" [ 1593.902831] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.907259] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-ba5b0055-b756-4f80-ba6b-7e8b705d2970" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.907529] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance network_info: |[{"id": "89fa6298-ff56-4900-8160-84554ea1e23c", "address": "fa:16:3e:c9:de:af", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89fa6298-ff", "ovs_interfaceid": "89fa6298-ff56-4900-8160-84554ea1e23c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1593.907910] env[62820]: DEBUG oslo_concurrency.lockutils [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] Acquired lock "refresh_cache-ba5b0055-b756-4f80-ba6b-7e8b705d2970" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.908095] env[62820]: DEBUG nova.network.neutron [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Refreshing network info cache for port 89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1593.910382] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:de:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89fa6298-ff56-4900-8160-84554ea1e23c', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1593.922266] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating folder: Project (e3a07ed2a19149b3a58ee43a07e13bba). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1593.924672] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4da186d9-b332-4458-8086-1696220a53ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.933891] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695840, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.945049] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created folder: Project (e3a07ed2a19149b3a58ee43a07e13bba) in parent group-v353379. [ 1593.945742] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating folder: Instances. Parent ref: group-v353583. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1593.945742] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50961f92-5237-4a52-950a-a76efd4badf8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.957387] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created folder: Instances in parent group-v353583. [ 1593.957670] env[62820]: DEBUG oslo.service.loopingcall [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.957870] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1593.958104] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26d203f3-65a2-49aa-8274-5d58479781ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.979287] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1593.979287] env[62820]: value = "task-1695843" [ 1593.979287] env[62820]: _type = "Task" [ 1593.979287] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.992808] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695843, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.105843] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695839, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.159049] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.159795] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1594.162927] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.538s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.163205] env[62820]: DEBUG nova.objects.instance [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lazy-loading 'resources' on Instance uuid a150a0d8-afcc-4a5b-a014-2c25a9bc4f07 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.412856] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695840, 'name': PowerOffVM_Task, 'duration_secs': 0.291626} completed successfully. 
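
The lockutils entries above ("acquired ... :: waited Ns", '"released" ... :: held Ns') amount to timing how long a caller waited for a named lock and how long it held it. A simplified stand-in for that bookkeeping, not oslo_concurrency.lockutils itself:

import threading
import time
from contextlib import contextmanager

_locks = {}   # one shared lock object per lock name (illustrative only)

@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))

with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass   # critical section: claim resources, update usage, etc.
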
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.413129] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1594.413926] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b024c55-381b-4c51-b52e-419c511d8420 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.452628] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691e2bf4-c4a0-4543-859e-2361e07990c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.488929] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695843, 'name': CreateVM_Task, 'duration_secs': 0.409948} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.489147] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1594.489840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.490014] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.490344] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1594.491212] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bbf243a-da63-44d2-9bf3-c8eba04e697d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.496329] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1594.496329] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d75937-28e4-84f9-83d7-fbc3138aa095" [ 1594.496329] env[62820]: _type = "Task" [ 1594.496329] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.503876] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d75937-28e4-84f9-83d7-fbc3138aa095, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.602643] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528765} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.602930] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1594.603165] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1594.603427] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b5c6830-4876-4647-b6af-4a423f674927 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.610475] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1594.610475] env[62820]: value = "task-1695844" [ 1594.610475] env[62820]: _type = "Task" [ 1594.610475] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.618697] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695844, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.666742] env[62820]: DEBUG nova.compute.utils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1594.671673] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Allocating IP information in the background. 
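
The SearchDatastore_Task / CopyVirtualDisk_Task / ExtendVirtualDisk_Task sequence above is the per-instance root-disk preparation: look up <image-id>.vmdk in the shared devstack-image-cache_base folder, copy it into the instance directory, then extend it to the flavor's root size. A rough sketch of the cache-then-copy decision, with hypothetical copy/fetch callables standing in for the datastore operations:

import os

def ensure_root_disk(cache_dir, image_id, instance_dir, copy, fetch):
    """Copy the cached base image into the instance directory, fetching it
    into the cache first if it is missing (illustrative helper only)."""
    cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
    target = os.path.join(instance_dir, os.path.basename(instance_dir) + '.vmdk')
    if not os.path.exists(cached):
        fetch(image_id, cached)    # populate the shared image cache once
    copy(cached, target)           # per-instance copy (CopyVirtualDisk_Task in the log)
    return target
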
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1594.671673] env[62820]: DEBUG nova.network.neutron [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1594.721503] env[62820]: DEBUG nova.policy [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fc30b5328e44f21a88fbcaedafe5a2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e5642bbb5de4060be9d4d0ae0f8d6a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1594.963783] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1594.966644] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5025b014-66e0-4826-b2dc-c5d434163b30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.975606] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1594.975606] env[62820]: value = "task-1695845" [ 1594.975606] env[62820]: _type = "Task" [ 1594.975606] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.976372] env[62820]: DEBUG nova.network.neutron [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Updated VIF entry in instance network info cache for port 89fa6298-ff56-4900-8160-84554ea1e23c. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1594.976690] env[62820]: DEBUG nova.network.neutron [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Updating instance_info_cache with network_info: [{"id": "89fa6298-ff56-4900-8160-84554ea1e23c", "address": "fa:16:3e:c9:de:af", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89fa6298-ff", "ovs_interfaceid": "89fa6298-ff56-4900-8160-84554ea1e23c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.989271] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695845, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.006070] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d75937-28e4-84f9-83d7-fbc3138aa095, 'name': SearchDatastore_Task, 'duration_secs': 0.008379} completed successfully. 
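
The network_info blob refreshed above carries far more than the VM build step needs; earlier in the trace it is reduced to the small "Instance VIF info" structure (network_name, mac_address, OpaqueNetwork ref, iface_id, vif_model). A sketch of that mapping using only fields visible in the log; the helper name is hypothetical:

def vif_info_from_port(port, vif_model='vmxnet3'):
    details = port.get('details', {})
    return {
        'network_name': port['network']['bridge'],             # e.g. 'br-int'
        'mac_address': port['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': port['id'],
        'vif_model': vif_model,
    }

# Values below are taken from the port shown in the log entries above.
port = {'id': '89fa6298-ff56-4900-8160-84554ea1e23c',
        'address': 'fa:16:3e:c9:de:af',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8'}}
print(vif_info_from_port(port))
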
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.008643] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.008887] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1595.009163] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.009318] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.009500] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1595.009933] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfaa9325-4ce0-486e-8ad2-d55c07075331 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.017905] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.018109] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1595.018840] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f14f80e-d60c-4ff5-a755-c8bffcb129a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.026540] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1595.026540] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5200a1c6-0c56-217c-b778-deaca67b8f4e" [ 1595.026540] env[62820]: _type = "Task" [ 1595.026540] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.033740] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5200a1c6-0c56-217c-b778-deaca67b8f4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.081286] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b22cbe7-9be4-4e81-82f5-125734568fe3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.088695] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62ac799-aff9-48cd-8cf5-56cc7257a8a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.129186] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa9e613-e995-4f52-9cc5-e8138f1c6340 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.137496] env[62820]: DEBUG nova.network.neutron [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Successfully created port: 1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1595.143730] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b76b5fd-54c8-42b4-bd20-240c4c4f25c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.146972] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695844, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064604} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.147650] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1595.148884] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38b74e9-49f3-4cf0-948a-3c84079f0ef3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.159640] env[62820]: DEBUG nova.compute.provider_tree [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.179539] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1595.180258] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1595.182826] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22c6dfc1-7b45-4ec7-aed0-b61f856c57e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.203309] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1595.203309] env[62820]: value = "task-1695846" [ 1595.203309] env[62820]: _type = "Task" [ 1595.203309] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.212126] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695846, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.484123] env[62820]: DEBUG oslo_concurrency.lockutils [req-f0227233-ba77-49a0-a6fd-679a9ed29643 req-eac8982e-3a06-48ba-a51d-723b86ca4c7d service nova] Releasing lock "refresh_cache-ba5b0055-b756-4f80-ba6b-7e8b705d2970" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.493784] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695845, 'name': CreateSnapshot_Task, 'duration_secs': 0.393686} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.494262] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1595.495487] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1224255b-0a07-4f17-80d1-abf282d2323a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.542591] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5200a1c6-0c56-217c-b778-deaca67b8f4e, 'name': SearchDatastore_Task, 'duration_secs': 0.008418} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.544070] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94c60ed7-f215-4c4d-8c81-022912674803 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.549780] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1595.549780] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5253b56a-f7d2-a1b9-1275-695ec54d8eb4" [ 1595.549780] env[62820]: _type = "Task" [ 1595.549780] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.557273] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5253b56a-f7d2-a1b9-1275-695ec54d8eb4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.663691] env[62820]: DEBUG nova.scheduler.client.report [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1595.712608] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695846, 'name': ReconfigVM_Task, 'duration_secs': 0.333309} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.712608] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0/2587a273-0115-483a-ba5e-994c87bbc4d0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1595.713940] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b13806b8-d38a-48f0-89c4-991c5781f0bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.721055] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1595.721055] env[62820]: value = "task-1695847" [ 1595.721055] env[62820]: _type = "Task" [ 1595.721055] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.726909] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695847, 'name': Rename_Task} progress is 0%. 
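
The "Inventory has not changed for provider ..." entries come from comparing the freshly computed inventory against what was last reported to placement and skipping the update when nothing differs. A trivial sketch of that comparison (the helper name is made up for illustration):

def changed_resource_classes(reported, computed):
    """Return the resource classes whose inventory dicts differ."""
    return {rc for rc in set(reported) | set(computed)
            if reported.get(rc) != computed.get(rc)}

reported = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}}
computed = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}}
assert not changed_resource_classes(reported, computed)   # -> "Inventory has not changed"
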
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.019227] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1596.019565] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a55b432f-76be-47c2-b75b-305bddb28782 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.028154] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1596.028154] env[62820]: value = "task-1695848" [ 1596.028154] env[62820]: _type = "Task" [ 1596.028154] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.038441] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695848, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.059921] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5253b56a-f7d2-a1b9-1275-695ec54d8eb4, 'name': SearchDatastore_Task, 'duration_secs': 0.01055} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.060207] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.060477] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1596.060745] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b468fe92-e185-4dbc-9488-730558a0f7c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.070609] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1596.070609] env[62820]: value = "task-1695849" [ 1596.070609] env[62820]: _type = "Task" [ 1596.070609] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.079185] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695849, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.169735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.172593] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.788s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.174440] env[62820]: INFO nova.compute.claims [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.197347] env[62820]: INFO nova.scheduler.client.report [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Deleted allocations for instance a150a0d8-afcc-4a5b-a014-2c25a9bc4f07 [ 1596.207405] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1596.234055] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695847, 'name': Rename_Task, 'duration_secs': 0.158506} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.237010] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1596.237798] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44aeb14d-3f66-4de8-8ac1-ffef2baca816 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1596.247039] env[62820]: DEBUG nova.virt.hardware [None 
req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1596.247729] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1596.248136] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1596.248720] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1596.249056] env[62820]: DEBUG nova.virt.hardware [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1596.252189] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9cf945-a221-4931-ae1a-9379705debf7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.256638] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1596.256638] env[62820]: value = "task-1695850" [ 1596.256638] env[62820]: _type = "Task" [ 1596.256638] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.265933] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fac57df-b8a1-4d5e-858e-2e403424bb56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.279364] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695850, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.541927] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695848, 'name': CloneVM_Task} progress is 94%. 
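
The nova.virt.hardware entries above pick a CPU topology for the m1.nano flavor: with 1 vCPU and no socket/core/thread limits from flavor or image, the only possible split is 1 socket x 1 core x 1 thread, which is why exactly one topology is found and preferred. A simplified, self-contained sketch of that enumeration (not the nova implementation):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield every (sockets, cores, threads) split whose product equals vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

# 1 vCPU with the default 65536 limits -> a single 1:1:1 topology, as in the log.
assert list(possible_topologies(1, 65536, 65536, 65536)) == [VirtCPUTopology(1, 1, 1)]
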
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.580662] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500193} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.580940] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1596.581170] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1596.581431] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65490cda-08fe-4278-8744-27b5b4dc60bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.588192] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1596.588192] env[62820]: value = "task-1695851" [ 1596.588192] env[62820]: _type = "Task" [ 1596.588192] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.595687] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695851, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.607524] env[62820]: DEBUG nova.compute.manager [req-64e1f802-9bdc-4912-b721-44478552b94a req-f2e65567-0b51-4083-9eb2-09e0ae23ecb7 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received event network-vif-plugged-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1596.607779] env[62820]: DEBUG oslo_concurrency.lockutils [req-64e1f802-9bdc-4912-b721-44478552b94a req-f2e65567-0b51-4083-9eb2-09e0ae23ecb7 service nova] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.608016] env[62820]: DEBUG oslo_concurrency.lockutils [req-64e1f802-9bdc-4912-b721-44478552b94a req-f2e65567-0b51-4083-9eb2-09e0ae23ecb7 service nova] Lock "3228cd34-2144-425a-aca6-400cb0991e43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.608530] env[62820]: DEBUG oslo_concurrency.lockutils [req-64e1f802-9bdc-4912-b721-44478552b94a req-f2e65567-0b51-4083-9eb2-09e0ae23ecb7 service nova] Lock "3228cd34-2144-425a-aca6-400cb0991e43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.608727] env[62820]: DEBUG nova.compute.manager [req-64e1f802-9bdc-4912-b721-44478552b94a req-f2e65567-0b51-4083-9eb2-09e0ae23ecb7 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] No waiting events found dispatching network-vif-plugged-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1596.608957] env[62820]: WARNING nova.compute.manager [req-64e1f802-9bdc-4912-b721-44478552b94a req-f2e65567-0b51-4083-9eb2-09e0ae23ecb7 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received unexpected event network-vif-plugged-1c306539-7756-458b-84e7-61bfbc0c7f35 for instance with vm_state building and task_state spawning. [ 1596.707404] env[62820]: DEBUG oslo_concurrency.lockutils [None req-caccf271-bcac-44fe-bbda-b384b14cb1d0 tempest-ImagesOneServerNegativeTestJSON-745129021 tempest-ImagesOneServerNegativeTestJSON-745129021-project-member] Lock "a150a0d8-afcc-4a5b-a014-2c25a9bc4f07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.970s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.708220] env[62820]: DEBUG nova.network.neutron [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Successfully updated port: 1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1596.768379] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695850, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.038814] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695848, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.102251] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695851, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069115} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.102801] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1597.103682] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8b51c4-f114-4235-a4c3-e7e167e12564 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.126414] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1597.126414] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05058a00-d15c-4e8c-ae17-c5ef67222f2a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.149611] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1597.149611] env[62820]: value = "task-1695852" [ 1597.149611] env[62820]: _type = "Task" [ 1597.149611] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.159303] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695852, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.210907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.210907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.211099] env[62820]: DEBUG nova.network.neutron [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1597.268413] env[62820]: DEBUG oslo_vmware.api [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695850, 'name': PowerOnVM_Task, 'duration_secs': 0.628095} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.268720] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1597.268929] env[62820]: DEBUG nova.compute.manager [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1597.272050] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89278cc-e408-4a5a-8cca-318ea8ebc741 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.523032] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fcb3fd-3ca2-46ac-aa15-01dd41b545ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.530239] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f0b59d-7641-4692-a163-31ebb460dccc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.542874] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695848, 'name': CloneVM_Task, 'duration_secs': 1.255984} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.570378] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Created linked-clone VM from snapshot [ 1597.571392] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1db1a0-62ab-46b8-af95-9ba401d86eab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.574298] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11488ef-96bf-4aaa-b2e7-f8de949b85c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.581986] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Uploading image c8fb3f1e-3f03-44f1-baf3-ae4839a4ac9f {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1597.586952] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcf95fc-0910-4096-bb86-5c0c6f309818 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.609348] env[62820]: DEBUG nova.compute.provider_tree [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1597.615319] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1597.615319] env[62820]: value = "vm-353587" [ 1597.615319] env[62820]: _type = "VirtualMachine" [ 1597.615319] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1597.615567] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-221e37ac-e25b-4f71-976e-7ec1f3adf600 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.622556] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lease: (returnval){ [ 1597.622556] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527ef944-2994-b9b8-a322-c79ec23c6fe9" [ 1597.622556] env[62820]: _type = "HttpNfcLease" [ 1597.622556] env[62820]: } obtained for exporting VM: (result){ [ 1597.622556] env[62820]: value = "vm-353587" [ 1597.622556] env[62820]: _type = "VirtualMachine" [ 1597.622556] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1597.622930] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the lease: (returnval){ [ 1597.622930] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527ef944-2994-b9b8-a322-c79ec23c6fe9" [ 1597.622930] env[62820]: _type = "HttpNfcLease" [ 1597.622930] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1597.629432] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1597.629432] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527ef944-2994-b9b8-a322-c79ec23c6fe9" [ 1597.629432] env[62820]: _type = "HttpNfcLease" [ 1597.629432] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1597.659191] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695852, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.757159] env[62820]: DEBUG nova.network.neutron [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1597.791118] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.024094] env[62820]: DEBUG nova.network.neutron [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.110410] env[62820]: DEBUG nova.scheduler.client.report [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1598.132149] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1598.132149] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527ef944-2994-b9b8-a322-c79ec23c6fe9" [ 1598.132149] env[62820]: _type = "HttpNfcLease" [ 1598.132149] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1598.132463] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1598.132463] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527ef944-2994-b9b8-a322-c79ec23c6fe9" [ 1598.132463] env[62820]: _type = "HttpNfcLease" [ 1598.132463] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1598.133474] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d08b20-c14d-4b0e-85a0-4ba3b3c62a2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.141684] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5258a3e0-d092-1129-13b1-8f0b1393b058/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1598.141940] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5258a3e0-d092-1129-13b1-8f0b1393b058/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1598.210714] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695852, 'name': ReconfigVM_Task, 'duration_secs': 0.841133} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.211039] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Reconfigured VM instance instance-00000046 to attach disk [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.211699] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ae2b333-e2a7-44ac-9cd4-d4f1e2fc5d93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.218836] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1598.218836] env[62820]: value = "task-1695854" [ 1598.218836] env[62820]: _type = "Task" [ 1598.218836] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.229690] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695854, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.233425] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4cd222c6-5b58-4418-8a8b-addc595bb532 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.530032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.530032] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Instance network_info: |[{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", 
"segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1598.530032] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:a0:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c306539-7756-458b-84e7-61bfbc0c7f35', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1598.539852] env[62820]: DEBUG oslo.service.loopingcall [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.540336] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1598.540979] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-771fa9ed-de7c-4243-bb9e-9231c04d79e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.566934] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1598.566934] env[62820]: value = "task-1695855" [ 1598.566934] env[62820]: _type = "Task" [ 1598.566934] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.577292] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695855, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.599735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "2587a273-0115-483a-ba5e-994c87bbc4d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.599735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "2587a273-0115-483a-ba5e-994c87bbc4d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.599735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "2587a273-0115-483a-ba5e-994c87bbc4d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.599735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "2587a273-0115-483a-ba5e-994c87bbc4d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.599735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "2587a273-0115-483a-ba5e-994c87bbc4d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.599735] env[62820]: INFO nova.compute.manager [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Terminating instance [ 1598.616622] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.617388] env[62820]: DEBUG nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1598.620683] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.563s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.623949] env[62820]: DEBUG nova.objects.instance [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lazy-loading 'resources' on Instance uuid 93e1a842-d598-4798-88ad-622ae5dbf057 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1598.733518] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695854, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.878975] env[62820]: DEBUG nova.compute.manager [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1598.879139] env[62820]: DEBUG nova.compute.manager [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing instance network info cache due to event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1598.879447] env[62820]: DEBUG oslo_concurrency.lockutils [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.879622] env[62820]: DEBUG oslo_concurrency.lockutils [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.880118] env[62820]: DEBUG nova.network.neutron [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1599.078876] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695855, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.105028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "refresh_cache-2587a273-0115-483a-ba5e-994c87bbc4d0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.105534] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "refresh_cache-2587a273-0115-483a-ba5e-994c87bbc4d0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.105712] env[62820]: DEBUG nova.network.neutron [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1599.125515] env[62820]: DEBUG nova.compute.utils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1599.129166] env[62820]: DEBUG nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1599.129500] env[62820]: DEBUG nova.network.neutron [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1599.215177] env[62820]: DEBUG nova.policy [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '381a74ba1e8f40a387dfd744cdd25ce4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14788b1c55684c2fbd3c07bff18757f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1599.230972] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695854, 'name': Rename_Task, 'duration_secs': 0.884433} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.234759] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1599.236207] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bde6ca2-c718-40e8-b87e-95fc5d39bf46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.245337] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1599.245337] env[62820]: value = "task-1695856" [ 1599.245337] env[62820]: _type = "Task" [ 1599.245337] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.262059] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695856, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.593777] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695855, 'name': CreateVM_Task, 'duration_secs': 0.879007} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.597318] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1599.599221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.599506] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.599933] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1599.600278] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e06a9c6-930a-4ba6-992c-4c7eca547808 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.612078] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1599.612078] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521c8886-9b88-2033-d0ed-d7cc8a5cf044" [ 1599.612078] env[62820]: _type = "Task" [ 1599.612078] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.622599] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521c8886-9b88-2033-d0ed-d7cc8a5cf044, 'name': SearchDatastore_Task, 'duration_secs': 0.011931} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.623370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.623370] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1599.624156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.624156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.624156] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1599.624156] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47d88b74-0255-4ea3-95b2-15d20bc556d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.630464] env[62820]: DEBUG nova.compute.manager [None 
req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1599.635258] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdbf339-8751-4449-b67f-585a5cca8500 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.639671] env[62820]: DEBUG nova.network.neutron [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1599.650194] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9b0ac1-5d16-495c-b53a-8ba5f0c787bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.655591] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1599.655591] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1599.656836] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-072f2fc4-a0b5-4195-b2f1-f1540fd60ede {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.692382] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5ca199-7484-4b9a-a6c8-362d0f5db1f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.697408] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1599.697408] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ccbff6-d1cc-4257-24a2-19de0536e41e" [ 1599.697408] env[62820]: _type = "Task" [ 1599.697408] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.698367] env[62820]: DEBUG nova.network.neutron [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updated VIF entry in instance network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1599.698807] env[62820]: DEBUG nova.network.neutron [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.709228] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ab6bd2-6068-439d-a4de-1f758f49ae6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.718464] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ccbff6-d1cc-4257-24a2-19de0536e41e, 'name': SearchDatastore_Task, 'duration_secs': 0.012956} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.720203] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32ad949e-0efd-4e78-bf2d-f7719411f2c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.732458] env[62820]: DEBUG nova.compute.provider_tree [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1599.741876] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1599.741876] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5275e104-401f-57b1-1c47-51ec0907cf68" [ 1599.741876] env[62820]: _type = "Task" [ 1599.741876] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.751283] env[62820]: DEBUG nova.network.neutron [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.757592] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5275e104-401f-57b1-1c47-51ec0907cf68, 'name': SearchDatastore_Task, 'duration_secs': 0.014669} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.758598] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.758598] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/3228cd34-2144-425a-aca6-400cb0991e43.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1599.766032] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d367a65-2177-419b-ad41-56b51b355e7a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.769295] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695856, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.776082] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1599.776082] env[62820]: value = "task-1695857" [ 1599.776082] env[62820]: _type = "Task" [ 1599.776082] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.787213] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695857, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.823304] env[62820]: DEBUG nova.network.neutron [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Successfully created port: 8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1600.203150] env[62820]: DEBUG oslo_concurrency.lockutils [req-61410854-4816-46f7-9613-18f158062d82 req-dacd092d-7d35-47e1-9c6b-745695d96440 service nova] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.237908] env[62820]: DEBUG nova.scheduler.client.report [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1600.263778] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "refresh_cache-2587a273-0115-483a-ba5e-994c87bbc4d0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.264379] env[62820]: DEBUG nova.compute.manager [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1600.265017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1600.265972] env[62820]: DEBUG oslo_vmware.api [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695856, 'name': PowerOnVM_Task, 'duration_secs': 0.528647} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.270300] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a56598-92d4-418f-b841-2f10932f22c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.275990] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1600.276371] env[62820]: INFO nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Took 9.02 seconds to spawn the instance on the hypervisor. [ 1600.276899] env[62820]: DEBUG nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1600.280066] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbcce88-a91b-4bdb-993f-3357c2f4d64d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.299536] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1600.300617] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab87d79c-02ff-4b64-b5ce-c89b03ad1b5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.315727] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695857, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.323612] env[62820]: DEBUG oslo_vmware.api [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1600.323612] env[62820]: value = "task-1695858" [ 1600.323612] env[62820]: _type = "Task" [ 1600.323612] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.339731] env[62820]: DEBUG oslo_vmware.api [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.646062] env[62820]: DEBUG nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1600.674859] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1600.675149] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1600.675506] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1600.675506] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1600.675676] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1600.675838] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1600.676074] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1600.676269] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1600.676527] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1600.677317] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1600.677317] env[62820]: DEBUG nova.virt.hardware [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1600.678148] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9eda7e8-364a-4680-9020-c1de91234884 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.686466] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a13ad2-62e9-4215-a8bb-81f1dbd7de70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.755277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.757792] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.039s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.758054] env[62820]: DEBUG nova.objects.instance [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lazy-loading 'resources' on Instance uuid b7806d81-eb2d-4724-8c40-ed88c8c77870 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1600.775359] env[62820]: INFO nova.scheduler.client.report [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted allocations for instance 93e1a842-d598-4798-88ad-622ae5dbf057 [ 1600.792412] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d 
tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627441} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.792769] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/3228cd34-2144-425a-aca6-400cb0991e43.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1600.793040] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1600.793355] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e5d1c3c-78ff-47dd-a9f6-ed6a2ad12b88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.801983] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1600.801983] env[62820]: value = "task-1695859" [ 1600.801983] env[62820]: _type = "Task" [ 1600.801983] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.812999] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695859, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.828845] env[62820]: INFO nova.compute.manager [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Took 31.12 seconds to build instance. [ 1600.842692] env[62820]: DEBUG oslo_vmware.api [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695858, 'name': PowerOffVM_Task, 'duration_secs': 0.197902} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.843023] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1600.843168] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1600.843480] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c524cc6-85e3-4d08-b12f-e4f670ab8572 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.872820] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1600.872820] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1600.872820] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleting the datastore file [datastore1] 2587a273-0115-483a-ba5e-994c87bbc4d0 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1600.872820] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e31c6afe-3b91-425f-8ce7-8338cea49ada {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.883021] env[62820]: DEBUG oslo_vmware.api [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1600.883021] env[62820]: value = "task-1695861" [ 1600.883021] env[62820]: _type = "Task" [ 1600.883021] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.891358] env[62820]: DEBUG oslo_vmware.api [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695861, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.033699] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "361b7da3-0e8c-4291-aba0-8b6116b8032f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.034113] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.070501] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.071220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.290177] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b0407ed-32bb-476e-bb40-8f0d6da37ec3 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "93e1a842-d598-4798-88ad-622ae5dbf057" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.212s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.311987] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079688} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.315272] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1601.316483] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b7e921-a514-4015-9f8d-a47e09ae0ad6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.339190] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/3228cd34-2144-425a-aca6-400cb0991e43.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1601.342127] env[62820]: DEBUG oslo_concurrency.lockutils [None req-df4df33b-6d5d-4c68-aa36-aa291ce14476 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.973s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.342747] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf43741c-1cd1-425e-949b-5127616f161c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.363804] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1601.363804] env[62820]: value = "task-1695862" [ 1601.363804] env[62820]: _type = "Task" [ 1601.363804] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.375970] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695862, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.403675] env[62820]: DEBUG oslo_vmware.api [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189989} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.404169] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1601.404372] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1601.404592] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1601.404706] env[62820]: INFO nova.compute.manager [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1601.404962] env[62820]: DEBUG oslo.service.loopingcall [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1601.409024] env[62820]: DEBUG nova.compute.manager [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1601.409024] env[62820]: DEBUG nova.network.neutron [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1601.425789] env[62820]: DEBUG nova.network.neutron [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1601.536373] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1601.574181] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1601.721117] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961fd8bb-6262-4e4d-9c3f-a27013e06a75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.731150] env[62820]: DEBUG nova.network.neutron [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Successfully updated port: 8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1601.732613] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f90a3c-00ca-4a72-bc0f-4c6904425a11 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.764994] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1477900b-2372-45b3-8b52-56b810fa2215 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.774716] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4161af-023f-4052-bbdd-a89f3b30b26c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.794600] env[62820]: DEBUG nova.compute.provider_tree [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.800081] env[62820]: DEBUG nova.compute.manager [req-4fafe149-adb0-490d-ba44-9fb5e93b27c0 req-34a045f5-743c-4427-bd35-61bde1837d0c service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-vif-plugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1601.800081] env[62820]: DEBUG oslo_concurrency.lockutils [req-4fafe149-adb0-490d-ba44-9fb5e93b27c0 req-34a045f5-743c-4427-bd35-61bde1837d0c service nova] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.800081] env[62820]: DEBUG oslo_concurrency.lockutils [req-4fafe149-adb0-490d-ba44-9fb5e93b27c0 req-34a045f5-743c-4427-bd35-61bde1837d0c service nova] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.800081] env[62820]: DEBUG oslo_concurrency.lockutils [req-4fafe149-adb0-490d-ba44-9fb5e93b27c0 req-34a045f5-743c-4427-bd35-61bde1837d0c service nova] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.800081] 
env[62820]: DEBUG nova.compute.manager [req-4fafe149-adb0-490d-ba44-9fb5e93b27c0 req-34a045f5-743c-4427-bd35-61bde1837d0c service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] No waiting events found dispatching network-vif-plugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1601.800081] env[62820]: WARNING nova.compute.manager [req-4fafe149-adb0-490d-ba44-9fb5e93b27c0 req-34a045f5-743c-4427-bd35-61bde1837d0c service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received unexpected event network-vif-plugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 for instance with vm_state building and task_state spawning. [ 1601.883311] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695862, 'name': ReconfigVM_Task, 'duration_secs': 0.506364} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.883671] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/3228cd34-2144-425a-aca6-400cb0991e43.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1601.884364] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbcf983a-3645-4fd9-9abc-a6c927e63dfa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.892884] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1601.892884] env[62820]: value = "task-1695863" [ 1601.892884] env[62820]: _type = "Task" [ 1601.892884] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.910372] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695863, 'name': Rename_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.931933] env[62820]: DEBUG nova.network.neutron [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.933457] env[62820]: INFO nova.compute.manager [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Rebuilding instance [ 1601.990221] env[62820]: DEBUG nova.compute.manager [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1601.991335] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6de7245-d4f1-40f3-9435-a08fbfabb4f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.003840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "9114a81d-86a9-493b-9c07-c4724a0588ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.004130] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.004360] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "9114a81d-86a9-493b-9c07-c4724a0588ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.004517] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.005186] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.006634] env[62820]: INFO nova.compute.manager [None 
req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Terminating instance [ 1602.071369] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.101116] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.238110] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.239717] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.239908] env[62820]: DEBUG nova.network.neutron [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1602.302102] env[62820]: DEBUG nova.scheduler.client.report [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1602.403477] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695863, 'name': Rename_Task, 'duration_secs': 0.219747} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.403882] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1602.404185] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ca5d7c7-49fa-4087-ba04-1ad278d808f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.410964] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1602.410964] env[62820]: value = "task-1695864" [ 1602.410964] env[62820]: _type = "Task" [ 1602.410964] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.419175] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.438857] env[62820]: INFO nova.compute.manager [-] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Took 1.03 seconds to deallocate network for instance. [ 1602.511295] env[62820]: DEBUG nova.compute.manager [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1602.511546] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.513009] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311151e0-c2c9-45f9-90d2-273abeb7f56a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.521533] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1602.521801] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58902957-06b6-4835-99de-3b8e9bdc7b86 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.529217] env[62820]: DEBUG oslo_vmware.api [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1602.529217] env[62820]: value = "task-1695865" [ 1602.529217] env[62820]: _type = "Task" [ 1602.529217] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.537752] env[62820]: DEBUG oslo_vmware.api [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695865, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.774800] env[62820]: DEBUG nova.network.neutron [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1602.809609] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.052s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.812761] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.058s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.812945] env[62820]: DEBUG nova.objects.instance [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lazy-loading 'resources' on Instance uuid 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1602.831191] env[62820]: INFO nova.scheduler.client.report [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Deleted allocations for instance b7806d81-eb2d-4724-8c40-ed88c8c77870 [ 1602.923484] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695864, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.930091] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "0d519bc8-3cc1-429e-b41b-ed0035622562" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.930575] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.946122] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.977682] env[62820]: DEBUG nova.network.neutron [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.008123] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1603.008472] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7569c1ed-f81c-4351-9b15-698e27d99176 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.016820] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1603.016820] env[62820]: value = "task-1695866" [ 1603.016820] env[62820]: _type = "Task" [ 1603.016820] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.025746] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.038558] env[62820]: DEBUG oslo_vmware.api [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695865, 'name': PowerOffVM_Task, 'duration_secs': 0.471387} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.038865] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1603.039055] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1603.039321] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24236960-d3d7-47e5-a3a7-0ccb60aa492f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.121568] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1603.121860] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1603.122115] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleting the datastore file [datastore1] 9114a81d-86a9-493b-9c07-c4724a0588ac {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1603.122455] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acbaf595-3de5-4ab3-9a22-e7ee2872f610 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.130146] env[62820]: DEBUG oslo_vmware.api [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1603.130146] env[62820]: value = "task-1695868" [ 1603.130146] env[62820]: _type = "Task" [ 1603.130146] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.139560] env[62820]: DEBUG oslo_vmware.api [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695868, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.339950] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5e269f84-a6f0-45c9-806e-2f4be48ad739 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "b7806d81-eb2d-4724-8c40-ed88c8c77870" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.608s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.421893] env[62820]: DEBUG oslo_vmware.api [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1695864, 'name': PowerOnVM_Task, 'duration_secs': 0.954611} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.424521] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1603.424771] env[62820]: INFO nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Took 7.22 seconds to spawn the instance on the hypervisor. [ 1603.424958] env[62820]: DEBUG nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1603.426394] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc20829e-2565-4570-b4f4-914731c0864b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.432804] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1603.480926] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.481431] env[62820]: DEBUG nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance network_info: |[{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1603.481958] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:27:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c81ac6d-fc1a-4519-81f6-1a3a523acee9', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1603.489981] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating folder: Project (14788b1c55684c2fbd3c07bff18757f9). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1603.493102] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-278759a4-807c-40de-9b59-d0d8c91b49e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.508194] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Created folder: Project (14788b1c55684c2fbd3c07bff18757f9) in parent group-v353379. [ 1603.508457] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating folder: Instances. Parent ref: group-v353589. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1603.508725] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b13993e-5e20-4bca-90a0-89c5501702f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.522377] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Created folder: Instances in parent group-v353589. [ 1603.522651] env[62820]: DEBUG oslo.service.loopingcall [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.522893] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1603.523523] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82f72ec1-5ba6-4d00-8053-5d9879cd23ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.543570] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695866, 'name': PowerOffVM_Task, 'duration_secs': 0.409906} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.544438] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1603.544752] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1603.545612] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd0d899-6fcc-46e7-8d4d-b89c57c97069 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.549396] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1603.549396] env[62820]: value = "task-1695871" [ 1603.549396] env[62820]: _type = "Task" [ 1603.549396] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.554864] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1603.557718] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e44e9ea-599e-4710-80c1-5b15e83ae765 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.562677] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695871, 'name': CreateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.639358] env[62820]: DEBUG oslo_vmware.api [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.373704} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.642060] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1603.642314] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1603.642459] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1603.642634] env[62820]: INFO nova.compute.manager [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1603.642876] env[62820]: DEBUG oslo.service.loopingcall [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.643279] env[62820]: DEBUG nova.compute.manager [-] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1603.643385] env[62820]: DEBUG nova.network.neutron [-] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1603.678195] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec655a78-a501-4efc-a827-c9aefc304d4d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.685231] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec06c8f-36a2-4ce2-8a9e-9953908f428c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.717700] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcad71b3-960b-4b0f-a05a-d199fb9633bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.725687] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e93787a-6b1e-4d3d-80ec-2ecaf8076580 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.740013] env[62820]: DEBUG nova.compute.provider_tree [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory 
has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1603.922689] env[62820]: DEBUG nova.compute.manager [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-changed-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1603.924029] env[62820]: DEBUG nova.compute.manager [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Refreshing instance network info cache due to event network-changed-8c81ac6d-fc1a-4519-81f6-1a3a523acee9. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1603.924347] env[62820]: DEBUG oslo_concurrency.lockutils [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.924508] env[62820]: DEBUG oslo_concurrency.lockutils [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.924678] env[62820]: DEBUG nova.network.neutron [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Refreshing network info cache for port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1603.957518] env[62820]: INFO nova.compute.manager [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Took 32.69 seconds to build instance. [ 1603.967124] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.060089] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695871, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.246143] env[62820]: DEBUG nova.scheduler.client.report [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1604.458154] env[62820]: DEBUG nova.network.neutron [-] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.463580] env[62820]: DEBUG oslo_concurrency.lockutils [None req-19100deb-15e2-4cf2-a23d-5dcbbffeb00d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.094s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.513546] env[62820]: DEBUG nova.compute.manager [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1604.513737] env[62820]: DEBUG nova.compute.manager [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing instance network info cache due to event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1604.513981] env[62820]: DEBUG oslo_concurrency.lockutils [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.514179] env[62820]: DEBUG oslo_concurrency.lockutils [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.514242] env[62820]: DEBUG nova.network.neutron [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1604.560793] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695871, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.578417] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1604.578653] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1604.578854] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1604.579124] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd19cfeb-908e-4e35-883a-38dbeef3df70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.584146] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "207efed9-20ea-4b9e-bca2-45521b41de6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.584263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.584423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "207efed9-20ea-4b9e-bca2-45521b41de6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.584604] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.584768] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 
tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.587821] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1604.587821] env[62820]: value = "task-1695873" [ 1604.587821] env[62820]: _type = "Task" [ 1604.587821] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.588363] env[62820]: INFO nova.compute.manager [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Terminating instance [ 1604.599870] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695873, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.673350] env[62820]: DEBUG nova.network.neutron [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updated VIF entry in instance network info cache for port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1604.673709] env[62820]: DEBUG nova.network.neutron [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.752205] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 
tempest-ServersAdminTestJSON-1123626963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.936s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.752205] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.882s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.758857] env[62820]: INFO nova.compute.claims [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1604.811612] env[62820]: INFO nova.scheduler.client.report [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Deleted allocations for instance 069f58d6-f6bc-4ded-8274-6fed7c2f45b3 [ 1604.965956] env[62820]: INFO nova.compute.manager [-] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Took 1.32 seconds to deallocate network for instance. [ 1605.061990] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695871, 'name': CreateVM_Task, 'duration_secs': 1.318667} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.064981] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1605.065869] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.066074] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.066425] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1605.067837] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-716b7b63-0e06-4f37-b1e4-5f784fc3db0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.074548] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 
tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1605.074548] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e9283c-bd07-61b9-75e1-d27e32b7e971" [ 1605.074548] env[62820]: _type = "Task" [ 1605.074548] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.085656] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e9283c-bd07-61b9-75e1-d27e32b7e971, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.099019] env[62820]: DEBUG nova.compute.manager [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1605.099019] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.100668] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a610162e-3b50-4f7e-a44a-29f8ac58c170 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.104389] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245143} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.104783] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1605.105110] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1605.105575] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1605.112360] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.113222] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c02c36a-8dd4-41e6-bb92-2d74ff6596da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.119941] env[62820]: DEBUG oslo_vmware.api [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1605.119941] env[62820]: value = "task-1695874" [ 1605.119941] env[62820]: _type = "Task" [ 1605.119941] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.130068] env[62820]: DEBUG oslo_vmware.api [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.177976] env[62820]: DEBUG oslo_concurrency.lockutils [req-dbe3d90a-3a22-45cf-9441-ddea156259dd req-a873ddf6-6af1-4d36-bcfc-218ae38d43dd service nova] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.287025] env[62820]: DEBUG nova.network.neutron [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updated VIF entry in instance network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1605.287629] env[62820]: DEBUG nova.network.neutron [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.323345] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de7b9a1-32c9-48e2-a342-eac2440138ef tempest-ServersAdminTestJSON-1123626963 tempest-ServersAdminTestJSON-1123626963-project-member] Lock "069f58d6-f6bc-4ded-8274-6fed7c2f45b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.596s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.472921] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.586037] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e9283c-bd07-61b9-75e1-d27e32b7e971, 'name': SearchDatastore_Task, 'duration_secs': 0.012756} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.586377] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.586629] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1605.586857] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.587012] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.587202] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1605.587541] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e3323c1-1fa5-499a-9eaa-b0cd21c50be4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.600374] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1605.600667] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1605.601347] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c8000ba-2f9a-4d57-bc52-c86793047592 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.608035] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1605.608035] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5221c98f-ddc6-3159-c2a5-c0ed4f67aedb" [ 1605.608035] env[62820]: _type = "Task" [ 1605.608035] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.620773] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5221c98f-ddc6-3159-c2a5-c0ed4f67aedb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.630843] env[62820]: DEBUG oslo_vmware.api [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695874, 'name': PowerOffVM_Task, 'duration_secs': 0.218262} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.631219] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.631338] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.631542] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b491f0a6-cd52-43f9-8c1a-714e90efa07e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.743253] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.743480] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.743663] 
env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Deleting the datastore file [datastore1] 207efed9-20ea-4b9e-bca2-45521b41de6a {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.743925] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-248f19c9-eea3-4ac1-8e5b-a5e94c17c768 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.750384] env[62820]: DEBUG oslo_vmware.api [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for the task: (returnval){ [ 1605.750384] env[62820]: value = "task-1695876" [ 1605.750384] env[62820]: _type = "Task" [ 1605.750384] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.757788] env[62820]: DEBUG oslo_vmware.api [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.795280] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5258a3e0-d092-1129-13b1-8f0b1393b058/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1605.795910] env[62820]: DEBUG oslo_concurrency.lockutils [req-57d9b297-9d24-4707-b8dc-763ce4f0180d req-86e2fb7d-2a50-47ad-9493-be157de521fc service nova] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.796864] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1dbfd4-28ca-446e-bc5d-2e0141a1caee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.805794] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5258a3e0-d092-1129-13b1-8f0b1393b058/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1605.805906] env[62820]: ERROR oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5258a3e0-d092-1129-13b1-8f0b1393b058/disk-0.vmdk due to incomplete transfer. 
[ 1605.806165] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3325fd95-7635-41a6-86c2-61458205c86b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.814659] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5258a3e0-d092-1129-13b1-8f0b1393b058/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1605.814866] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Uploaded image c8fb3f1e-3f03-44f1-baf3-ae4839a4ac9f to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1605.817322] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1605.817584] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-55a6386c-8c35-49ac-bf8e-f3a3b8e07a95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.823829] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1605.823829] env[62820]: value = "task-1695877" [ 1605.823829] env[62820]: _type = "Task" [ 1605.823829] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.838099] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695877, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.956839] env[62820]: DEBUG nova.compute.manager [req-63948151-35c7-49e6-91f9-6b2112102a1c req-57be6716-c7f0-4b24-980e-2d5454351c1c service nova] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Received event network-vif-deleted-fe54bd71-0f0f-4124-aaca-84f035a9773a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1606.112036] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb091b92-d8bc-4f33-b493-365acd4376ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.130191] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868a4490-3f16-4af4-8aa6-8d7bd6fff7d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.133467] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5221c98f-ddc6-3159-c2a5-c0ed4f67aedb, 'name': SearchDatastore_Task, 'duration_secs': 0.017697} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.134602] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be30e490-5b51-4c3b-ad69-2eba23f157d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.170349] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c86163-320e-4084-b36f-0360d403491a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.176658] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1606.176658] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520e0f1e-ebf7-ec06-1cec-1eb58a418396" [ 1606.176658] env[62820]: _type = "Task" [ 1606.176658] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.182556] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1606.182786] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1606.184146] 
env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1606.184146] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1606.184505] env[62820]: DEBUG nova.virt.hardware [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1606.185213] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1217dea2-19bc-4b31-b170-b20f052aabce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.189696] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b204e6-6c83-480c-a413-eaf1180ffe56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.197518] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520e0f1e-ebf7-ec06-1cec-1eb58a418396, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.209598] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d5d844-aab7-4732-aa64-4f8a840ea848 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.213778] env[62820]: DEBUG nova.compute.provider_tree [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1606.226478] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:de:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89fa6298-ff56-4900-8160-84554ea1e23c', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1606.234143] env[62820]: DEBUG oslo.service.loopingcall [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.235193] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1606.235423] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13a8eb8d-2a0a-40eb-970c-6294104cb0c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.256173] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1606.256173] env[62820]: value = "task-1695878" [ 1606.256173] env[62820]: _type = "Task" [ 1606.256173] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.262013] env[62820]: DEBUG oslo_vmware.api [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Task: {'id': task-1695876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33294} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.262568] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.262759] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.262964] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.263164] env[62820]: INFO nova.compute.manager [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1606.263401] env[62820]: DEBUG oslo.service.loopingcall [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.263587] env[62820]: DEBUG nova.compute.manager [-] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1606.263683] env[62820]: DEBUG nova.network.neutron [-] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1606.276187] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695878, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.337023] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695877, 'name': Destroy_Task, 'duration_secs': 0.385945} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.337023] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Destroyed the VM [ 1606.337023] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1606.337023] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b39d6af5-e91b-4f29-8cca-3534e9bb4902 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.342112] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1606.342112] env[62820]: value = "task-1695879" [ 1606.342112] env[62820]: _type = "Task" [ 1606.342112] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.350190] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695879, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.688027] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520e0f1e-ebf7-ec06-1cec-1eb58a418396, 'name': SearchDatastore_Task, 'duration_secs': 0.03483} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.688027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.688027] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1606.688027] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4569b28-0a42-4e88-92d5-c05ee015e483 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.697860] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1606.697860] env[62820]: value = "task-1695880" [ 1606.697860] env[62820]: _type = "Task" [ 1606.697860] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.704979] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695880, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.737605] env[62820]: ERROR nova.scheduler.client.report [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [req-b98bd1ec-fe62-416f-bbf1-76c796efe0e8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b98bd1ec-fe62-416f-bbf1-76c796efe0e8"}]} [ 1606.756893] env[62820]: DEBUG nova.scheduler.client.report [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1606.770629] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695878, 'name': CreateVM_Task, 'duration_secs': 0.429435} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.770866] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1606.771577] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.771825] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.772215] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1606.772502] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74424d8e-7d27-4c3b-9820-5e9abada1634 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.777630] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1606.777630] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523ccc08-1a6b-75dd-98c9-73dfbb3b3994" [ 1606.777630] env[62820]: _type = "Task" [ 1606.777630] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.782276] env[62820]: DEBUG nova.scheduler.client.report [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1606.782514] env[62820]: DEBUG nova.compute.provider_tree [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1606.791240] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523ccc08-1a6b-75dd-98c9-73dfbb3b3994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.796548] env[62820]: DEBUG nova.scheduler.client.report [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1606.817060] env[62820]: DEBUG nova.scheduler.client.report [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1606.852815] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695879, 'name': RemoveSnapshot_Task, 'duration_secs': 0.434146} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.853367] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1606.853616] env[62820]: DEBUG nova.compute.manager [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1606.854510] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872b7fe5-0a37-45d4-a8d5-3e988cf517a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.020406] env[62820]: DEBUG nova.network.neutron [-] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.110576] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c12fbc-17ca-473f-8114-f3633705f7be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.118055] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bae6725-6dc7-4a9e-a13d-1e97ed04e6de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.148199] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302b9143-ffd3-4ff6-979a-2d569020e72f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.156070] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e65e5bb-4b75-4616-911a-4640d6d1ede7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.171077] env[62820]: DEBUG nova.compute.provider_tree [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1607.209280] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695880, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.289125] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523ccc08-1a6b-75dd-98c9-73dfbb3b3994, 'name': SearchDatastore_Task, 'duration_secs': 0.013652} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.289467] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.289737] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1607.289988] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.290154] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.290334] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1607.290618] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-105ec495-1f0b-4d5d-a505-faec5c3e049a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.306996] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1607.307194] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1607.308011] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631f4c17-1c30-47d5-b37a-d4ffa056c488 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.313700] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1607.313700] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523b470f-900b-2717-c158-ec7e4ad30e9f" [ 1607.313700] env[62820]: _type = "Task" [ 1607.313700] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.322381] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523b470f-900b-2717-c158-ec7e4ad30e9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.369892] env[62820]: INFO nova.compute.manager [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Shelve offloading [ 1607.525766] env[62820]: INFO nova.compute.manager [-] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Took 1.26 seconds to deallocate network for instance. [ 1607.703424] env[62820]: DEBUG nova.scheduler.client.report [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1607.703934] env[62820]: DEBUG nova.compute.provider_tree [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 102 to 103 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1607.703934] env[62820]: DEBUG nova.compute.provider_tree [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1607.712720] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.946382} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.712993] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1607.713226] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1607.713481] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85b241dd-d26e-4fb7-9dac-d7f5ab32a97e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.720327] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1607.720327] env[62820]: value = "task-1695881" [ 1607.720327] env[62820]: _type = "Task" [ 1607.720327] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.729371] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695881, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.827947] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523b470f-900b-2717-c158-ec7e4ad30e9f, 'name': SearchDatastore_Task, 'duration_secs': 0.053986} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.828911] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a792ab6e-d4ae-46b7-92d7-d993ed44b4f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.834211] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1607.834211] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52493168-1835-40e3-67b0-b9103882a0ef" [ 1607.834211] env[62820]: _type = "Task" [ 1607.834211] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.841829] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52493168-1835-40e3-67b0-b9103882a0ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.873410] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1607.873800] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-614e0c90-34b1-489c-97d3-dc5acfb653a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.880642] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1607.880642] env[62820]: value = "task-1695882" [ 1607.880642] env[62820]: _type = "Task" [ 1607.880642] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.888972] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695882, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.033375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.069411] env[62820]: DEBUG nova.compute.manager [req-d1c0a060-8453-4206-827f-4288aab6142f req-6493c014-70fe-4539-a6e9-9d867fbd4906 service nova] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Received event network-vif-deleted-fb9a90bf-d141-401b-84c1-af8a103dc37e {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1608.209687] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.458s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.209881] env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1608.213263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.767s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.213486] env[62820]: DEBUG nova.objects.instance [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lazy-loading 'resources' on Instance uuid 3a325dbf-87fb-4f7e-a665-e5d181333a5c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.230852] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068046} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.230852] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1608.231466] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a7e713-ca40-41a6-a818-b7618d9f64f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.255806] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1608.256637] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34b10754-0a0d-4420-aea3-89e226adaa16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.276811] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1608.276811] env[62820]: value = "task-1695883" [ 1608.276811] env[62820]: _type = "Task" [ 1608.276811] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.285890] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695883, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.344717] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52493168-1835-40e3-67b0-b9103882a0ef, 'name': SearchDatastore_Task, 'duration_secs': 0.03484} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.345059] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.345376] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1608.345642] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-914dadfe-1c44-410d-b06d-c4c7bc296fea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.352769] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1608.352769] env[62820]: value = "task-1695884" [ 1608.352769] env[62820]: _type = "Task" [ 1608.352769] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.360140] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695884, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.390033] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1608.390152] env[62820]: DEBUG nova.compute.manager [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1608.390927] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e6df43-92e6-4a71-944d-40feac807561 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.396940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.397126] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.397404] env[62820]: DEBUG nova.network.neutron [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1608.715043] env[62820]: DEBUG nova.compute.utils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1608.716014] env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1608.716206] env[62820]: DEBUG nova.network.neutron [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1608.785277] env[62820]: DEBUG nova.policy [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '60f0ddaebd9044c1acc5a3d78cc76de3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bcab027f7124127ba9c8209703129d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1608.793476] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695883, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.865944] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695884, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.093730] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d36150c-6627-4b32-a292-55e20373a05f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.104963] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d73233-2f7a-4f18-bbd6-24659691700d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.145926] env[62820]: DEBUG nova.network.neutron [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Successfully created port: 22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1609.150577] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f7b3ca-02ed-4a62-8546-f41b975fb42b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.160679] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f96640a-c833-498d-8622-811ef269e643 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.179725] env[62820]: DEBUG nova.compute.provider_tree [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.201529] env[62820]: DEBUG nova.network.neutron [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updating instance_info_cache with network_info: [{"id": "ff29c319-f707-464e-83f5-4df60b1eb8df", "address": "fa:16:3e:f0:47:4e", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff29c319-f7", "ovs_interfaceid": "ff29c319-f707-464e-83f5-4df60b1eb8df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.221855] 
env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1609.292486] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695883, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.371022] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.871702} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.371022] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1609.371022] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1609.371022] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44760834-0d44-4005-8efc-fff1e9dd42af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.379452] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1609.379452] env[62820]: value = "task-1695885" [ 1609.379452] env[62820]: _type = "Task" [ 1609.379452] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.390999] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695885, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.683025] env[62820]: DEBUG nova.scheduler.client.report [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1609.704501] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.791217] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695883, 'name': ReconfigVM_Task, 'duration_secs': 1.015671} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.791217] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Reconfigured VM instance instance-00000048 to attach disk [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1609.791217] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-100ae59e-5102-4b1e-b455-c9a6ec600cdf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.800394] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1609.800394] env[62820]: value = "task-1695886" [ 1609.800394] env[62820]: _type = "Task" [ 1609.800394] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.813234] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695886, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.891090] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086631} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.891841] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1609.892758] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65da441b-53fa-457d-b3c9-0700ad68ea55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.929770] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1609.929770] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a26b8dc0-8716-46cc-b826-a4beed3f74a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.952189] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1609.952189] env[62820]: value = "task-1695887" [ 1609.952189] env[62820]: _type = "Task" [ 1609.952189] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.963017] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695887, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.188837] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.975s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.191853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.401s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.192171] env[62820]: DEBUG nova.objects.instance [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1610.232867] env[62820]: INFO nova.scheduler.client.report [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Deleted allocations for instance 3a325dbf-87fb-4f7e-a665-e5d181333a5c [ 1610.233717] env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1610.267282] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.267631] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.267827] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.268209] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.268405] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.269613] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.269613] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.269613] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.269613] env[62820]: DEBUG 
nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.269613] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.269613] env[62820]: DEBUG nova.virt.hardware [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.270786] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b105b5e6-d1ae-4e9e-8656-d4c79acb0f20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.280396] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db73252-0b51-4308-b071-9c929530e690 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.308891] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695886, 'name': Rename_Task, 'duration_secs': 0.380699} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.309359] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1610.309618] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23901d1c-ebf6-49dc-9c6f-5f06d20e7c61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.315936] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1610.315936] env[62820]: value = "task-1695888" [ 1610.315936] env[62820]: _type = "Task" [ 1610.315936] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.323814] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695888, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.365947] env[62820]: DEBUG nova.compute.manager [req-2ef4a6c9-d64d-439a-8cd1-456e8fb18db6 req-c8599995-7b74-48ce-946b-4af2a5a03d7a service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Received event network-vif-unplugged-ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1610.366184] env[62820]: DEBUG oslo_concurrency.lockutils [req-2ef4a6c9-d64d-439a-8cd1-456e8fb18db6 req-c8599995-7b74-48ce-946b-4af2a5a03d7a service nova] Acquiring lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.366393] env[62820]: DEBUG oslo_concurrency.lockutils [req-2ef4a6c9-d64d-439a-8cd1-456e8fb18db6 req-c8599995-7b74-48ce-946b-4af2a5a03d7a service nova] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.366558] env[62820]: DEBUG oslo_concurrency.lockutils [req-2ef4a6c9-d64d-439a-8cd1-456e8fb18db6 req-c8599995-7b74-48ce-946b-4af2a5a03d7a service nova] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.366852] env[62820]: DEBUG nova.compute.manager [req-2ef4a6c9-d64d-439a-8cd1-456e8fb18db6 req-c8599995-7b74-48ce-946b-4af2a5a03d7a service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] No waiting events found dispatching network-vif-unplugged-ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1610.367078] env[62820]: WARNING nova.compute.manager [req-2ef4a6c9-d64d-439a-8cd1-456e8fb18db6 req-c8599995-7b74-48ce-946b-4af2a5a03d7a service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Received unexpected event network-vif-unplugged-ff29c319-f707-464e-83f5-4df60b1eb8df for instance with vm_state shelved and task_state shelving_offloading. 
[ 1610.426808] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1610.428278] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0539ffac-0551-4be8-a6cb-a9aac69b7c70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.437736] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1610.437991] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbcc8db8-d98e-4bbf-bc90-f83ef8cdc406 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.464641] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695887, 'name': ReconfigVM_Task, 'duration_secs': 0.348684} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.464939] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Reconfigured VM instance instance-00000046 to attach disk [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970/ba5b0055-b756-4f80-ba6b-7e8b705d2970.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1610.465761] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21057292-35ef-479f-a9a8-e41ec687d37c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.484555] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1610.484555] env[62820]: value = "task-1695890" [ 1610.484555] env[62820]: _type = "Task" [ 1610.484555] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.494019] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695890, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.537391] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1610.537702] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1610.537912] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleting the datastore file [datastore1] 4ac8c3b8-e5e5-4a74-a430-a88e856b705e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1610.538342] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d2010c8-4b57-4fc1-b18d-7dd09af9b32d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.549181] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1610.549181] env[62820]: value = "task-1695891" [ 1610.549181] env[62820]: _type = "Task" [ 1610.549181] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.564531] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695891, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.747166] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ca0840b1-4512-48cd-b257-ac01d8fa2754 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "3a325dbf-87fb-4f7e-a665-e5d181333a5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.727s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.799286] env[62820]: DEBUG nova.compute.manager [req-94ba07f5-1d51-4a63-891f-06cb15abae5a req-14ca482b-13a4-4b6d-9017-df840d7f3f7c service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Received event network-vif-plugged-22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1610.799569] env[62820]: DEBUG oslo_concurrency.lockutils [req-94ba07f5-1d51-4a63-891f-06cb15abae5a req-14ca482b-13a4-4b6d-9017-df840d7f3f7c service nova] Acquiring lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.799739] env[62820]: DEBUG oslo_concurrency.lockutils [req-94ba07f5-1d51-4a63-891f-06cb15abae5a req-14ca482b-13a4-4b6d-9017-df840d7f3f7c service nova] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.800180] env[62820]: DEBUG oslo_concurrency.lockutils [req-94ba07f5-1d51-4a63-891f-06cb15abae5a req-14ca482b-13a4-4b6d-9017-df840d7f3f7c service nova] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.800448] env[62820]: DEBUG nova.compute.manager [req-94ba07f5-1d51-4a63-891f-06cb15abae5a req-14ca482b-13a4-4b6d-9017-df840d7f3f7c service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] No waiting events found dispatching network-vif-plugged-22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1610.800658] env[62820]: WARNING nova.compute.manager [req-94ba07f5-1d51-4a63-891f-06cb15abae5a req-14ca482b-13a4-4b6d-9017-df840d7f3f7c service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Received unexpected event network-vif-plugged-22b45c92-acc9-495a-97db-769521fc7c3b for instance with vm_state building and task_state spawning. [ 1610.828079] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695888, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.881542] env[62820]: DEBUG nova.network.neutron [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Successfully updated port: 22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1610.995272] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695890, 'name': Rename_Task, 'duration_secs': 0.181214} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.995703] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1610.996141] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e5d10b6-cf97-40e4-87f0-a9179b75b5f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.004038] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1611.004038] env[62820]: value = "task-1695892" [ 1611.004038] env[62820]: _type = "Task" [ 1611.004038] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.015391] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695892, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.058962] env[62820]: DEBUG oslo_vmware.api [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1695891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218517} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.059246] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1611.059436] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1611.059611] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1611.081733] env[62820]: INFO nova.scheduler.client.report [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocations for instance 4ac8c3b8-e5e5-4a74-a430-a88e856b705e [ 1611.203115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6168473f-a55f-4c4a-9cbd-ee684306086e tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.204272] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.133s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.205807] env[62820]: INFO nova.compute.claims [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1611.327976] env[62820]: DEBUG oslo_vmware.api [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695888, 'name': PowerOnVM_Task, 'duration_secs': 0.632559} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.328277] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1611.328485] env[62820]: INFO nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Took 10.68 seconds to spawn the instance on the hypervisor. [ 1611.328686] env[62820]: DEBUG nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1611.329484] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2117b8-9c0f-471a-9c01-fa0be3563322 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.384550] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "refresh_cache-c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.384701] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquired lock "refresh_cache-c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.384864] env[62820]: DEBUG nova.network.neutron [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1611.515968] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695892, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.586744] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.850256] env[62820]: INFO nova.compute.manager [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Took 33.49 seconds to build instance. [ 1611.898502] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "46434419-d6de-4cc1-905c-14698512b7a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.898725] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.941965] env[62820]: DEBUG nova.network.neutron [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1612.016951] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695892, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.104789] env[62820]: DEBUG nova.network.neutron [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Updating instance_info_cache with network_info: [{"id": "22b45c92-acc9-495a-97db-769521fc7c3b", "address": "fa:16:3e:bc:c7:08", "network": {"id": "a774e1d0-5791-4d30-a9d9-3bf5cc707f62", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1441795595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bcab027f7124127ba9c8209703129d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22b45c92-ac", "ovs_interfaceid": "22b45c92-acc9-495a-97db-769521fc7c3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.150262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.150513] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.178984] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.352386] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f51ba2a-2bda-4ef1-8ef6-14ea2e033183 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.996s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.391336] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.391687] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.396574] env[62820]: DEBUG nova.compute.manager [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Received event network-changed-ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1612.396574] env[62820]: DEBUG nova.compute.manager [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Refreshing instance network info cache due to event network-changed-ff29c319-f707-464e-83f5-4df60b1eb8df. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1612.396574] env[62820]: DEBUG oslo_concurrency.lockutils [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] Acquiring lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.396574] env[62820]: DEBUG oslo_concurrency.lockutils [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] Acquired lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.396574] env[62820]: DEBUG nova.network.neutron [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Refreshing network info cache for port ff29c319-f707-464e-83f5-4df60b1eb8df {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1612.400613] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1612.504429] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04572130-ded3-4c0a-bd98-fabbfd73a308 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.519645] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7751d566-eb81-42f1-b354-03e13c7d52de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.522738] env[62820]: DEBUG oslo_vmware.api [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695892, 'name': PowerOnVM_Task, 'duration_secs': 1.110365} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.523009] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1612.523227] env[62820]: DEBUG nova.compute.manager [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1612.524279] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962a6e2c-16ef-4847-8415-4c73f8461551 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.551221] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7d1ee6-4f2c-4575-a099-c05e0f9ffbd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.565418] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c52a7d7-8da1-48ef-98d6-e07c59d1079b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.579698] env[62820]: DEBUG nova.compute.provider_tree [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.607902] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Releasing lock "refresh_cache-c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.608243] env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: 
c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Instance network_info: |[{"id": "22b45c92-acc9-495a-97db-769521fc7c3b", "address": "fa:16:3e:bc:c7:08", "network": {"id": "a774e1d0-5791-4d30-a9d9-3bf5cc707f62", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1441795595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bcab027f7124127ba9c8209703129d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22b45c92-ac", "ovs_interfaceid": "22b45c92-acc9-495a-97db-769521fc7c3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1612.608684] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:c7:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22b45c92-acc9-495a-97db-769521fc7c3b', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1612.616073] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Creating folder: Project (4bcab027f7124127ba9c8209703129d1). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1612.616345] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84f9cbbe-12cb-4e54-863e-2d16684d0d80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.619781] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "492db939-78f4-4642-89dd-a01fa94f41b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.620015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "492db939-78f4-4642-89dd-a01fa94f41b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.620218] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "492db939-78f4-4642-89dd-a01fa94f41b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.620835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "492db939-78f4-4642-89dd-a01fa94f41b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.620835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "492db939-78f4-4642-89dd-a01fa94f41b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.622800] env[62820]: INFO nova.compute.manager [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Terminating instance [ 1612.629097] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Created folder: Project (4bcab027f7124127ba9c8209703129d1) in parent group-v353379. [ 1612.629364] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Creating folder: Instances. Parent ref: group-v353593. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1612.629694] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b5a8d67-7eed-431e-af28-150680a2b104 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.639971] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Created folder: Instances in parent group-v353593. [ 1612.640225] env[62820]: DEBUG oslo.service.loopingcall [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1612.640413] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1612.640653] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe4ec9b7-dc41-4df9-898f-9c2bd75efcb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.656500] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1612.664355] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1612.664355] env[62820]: value = "task-1695895" [ 1612.664355] env[62820]: _type = "Task" [ 1612.664355] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.679449] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695895, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.825381] env[62820]: DEBUG nova.compute.manager [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Received event network-changed-22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1612.825490] env[62820]: DEBUG nova.compute.manager [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Refreshing instance network info cache due to event network-changed-22b45c92-acc9-495a-97db-769521fc7c3b. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1612.825628] env[62820]: DEBUG oslo_concurrency.lockutils [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] Acquiring lock "refresh_cache-c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.825760] env[62820]: DEBUG oslo_concurrency.lockutils [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] Acquired lock "refresh_cache-c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.825920] env[62820]: DEBUG nova.network.neutron [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Refreshing network info cache for port 22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1612.894184] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1612.922883] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.068373] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.082597] env[62820]: DEBUG nova.scheduler.client.report [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1613.127419] env[62820]: DEBUG nova.compute.manager [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1613.127905] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1613.128865] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8543560-c969-4775-848c-87b12ca85c71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.139797] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1613.140150] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82575d44-d4d9-4a62-a1fa-72bf6e160edb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.147084] env[62820]: DEBUG oslo_vmware.api [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1613.147084] env[62820]: value = "task-1695896" [ 1613.147084] env[62820]: _type = "Task" [ 1613.147084] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.156479] env[62820]: DEBUG oslo_vmware.api [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.176317] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695895, 'name': CreateVM_Task, 'duration_secs': 0.379041} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.177819] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.178082] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1613.178820] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.179065] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.179443] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1613.179750] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31fa6c7a-752b-460c-859f-c8f6dfb9bc37 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.187021] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1613.187021] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5233551b-702c-bbd8-a45f-240a6391c69f" [ 1613.187021] env[62820]: _type = "Task" [ 1613.187021] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.187021] env[62820]: DEBUG nova.network.neutron [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updated VIF entry in instance network info cache for port ff29c319-f707-464e-83f5-4df60b1eb8df. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.187021] env[62820]: DEBUG nova.network.neutron [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updating instance_info_cache with network_info: [{"id": "ff29c319-f707-464e-83f5-4df60b1eb8df", "address": "fa:16:3e:f0:47:4e", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": null, "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapff29c319-f7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.196736] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5233551b-702c-bbd8-a45f-240a6391c69f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.288888] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.288888] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.418346] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.551610] env[62820]: DEBUG nova.network.neutron [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Updated VIF entry in instance network info cache for port 22b45c92-acc9-495a-97db-769521fc7c3b. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.551610] env[62820]: DEBUG nova.network.neutron [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Updating instance_info_cache with network_info: [{"id": "22b45c92-acc9-495a-97db-769521fc7c3b", "address": "fa:16:3e:bc:c7:08", "network": {"id": "a774e1d0-5791-4d30-a9d9-3bf5cc707f62", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1441795595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bcab027f7124127ba9c8209703129d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22b45c92-ac", "ovs_interfaceid": "22b45c92-acc9-495a-97db-769521fc7c3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.587989] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.588656] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1613.591537] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.491s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.593740] env[62820]: INFO nova.compute.claims [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1613.658736] env[62820]: DEBUG oslo_vmware.api [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695896, 'name': PowerOffVM_Task, 'duration_secs': 0.22569} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.658950] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1613.659141] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1613.659392] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdc188ad-3f60-4be9-a150-78d46f0fd094 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.692660] env[62820]: DEBUG oslo_concurrency.lockutils [req-f9710dd6-a165-416f-8bd4-144f1b3a1699 req-42cf626e-8f75-4cf7-a025-b35f5108a79f service nova] Releasing lock "refresh_cache-4ac8c3b8-e5e5-4a74-a430-a88e856b705e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.700579] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5233551b-702c-bbd8-a45f-240a6391c69f, 'name': SearchDatastore_Task, 'duration_secs': 0.013005} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.700942] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.701143] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1613.701382] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.701531] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.701708] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1613.701969] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ad99c83-70fe-410a-aa12-e503affd3217 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.711603] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1613.711775] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1613.712528] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-810bc613-34dd-4a75-9422-78ce44da9b83 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.718216] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1613.718216] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bd28bc-17e9-8c2b-92fd-e7b9ec028dff" [ 1613.718216] env[62820]: _type = "Task" [ 1613.718216] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.727488] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bd28bc-17e9-8c2b-92fd-e7b9ec028dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.738305] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1613.738523] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1613.738767] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Deleting the datastore file [datastore1] 492db939-78f4-4642-89dd-a01fa94f41b5 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1613.739144] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-553be38a-163c-42c3-8e74-a0ee32a13b87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.746619] env[62820]: DEBUG oslo_vmware.api [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for the task: (returnval){ [ 1613.746619] env[62820]: value = "task-1695898" [ 1613.746619] env[62820]: _type = "Task" [ 1613.746619] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.754933] env[62820]: DEBUG oslo_vmware.api [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695898, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.790235] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1613.902976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.902976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.902976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.902976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.902976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.904913] env[62820]: INFO nova.compute.manager [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Terminating instance [ 1614.053903] env[62820]: DEBUG oslo_concurrency.lockutils [req-0786a667-982e-4353-8624-41a9e27b48e5 req-8fa04ca3-e695-4d49-a8a5-e4b3d00c153f service nova] Releasing lock "refresh_cache-c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.098662] env[62820]: DEBUG nova.compute.utils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 
tempest-MultipleCreateTestJSON-1191697251-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1614.101820] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1614.101995] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1614.149026] env[62820]: DEBUG nova.policy [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e757d7fd8474b04903db4fb76781717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c944a7dcf084460f9fb13731534ed788', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1614.230664] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bd28bc-17e9-8c2b-92fd-e7b9ec028dff, 'name': SearchDatastore_Task, 'duration_secs': 0.013803} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.231810] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8772f2e6-273d-485e-a0d8-369e08d0c28c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.238538] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1614.238538] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523b66ee-99a7-71b4-ca3f-63a69d7914d3" [ 1614.238538] env[62820]: _type = "Task" [ 1614.238538] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.248292] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523b66ee-99a7-71b4-ca3f-63a69d7914d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.256437] env[62820]: DEBUG oslo_vmware.api [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Task: {'id': task-1695898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.385933} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.256691] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1614.256875] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1614.257085] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1614.257237] env[62820]: INFO nova.compute.manager [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1614.257505] env[62820]: DEBUG oslo.service.loopingcall [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
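Annotation: every Task: {'id': ..., 'name': ...} entry above follows the same lifecycle: the caller blocks in wait_for_task, the task is polled and its progress logged at an interval, and on completion a duration_secs is reported. The following self-contained sketch reproduces that polling shape; FakeTask and the 0.5-second interval are assumptions for demonstration and are not the oslo.vmware implementation.

    import time

    class FakeTask:
        # Stand-in for a vCenter task handle; real code reads the task's
        # 'info' property through the PropertyCollector instead of a counter.
        def __init__(self, steps):
            self._steps = steps
            self.progress = 0
            self.state = "running"

        def refresh(self):
            self.progress = min(100, self.progress + 100 // self._steps)
            if self.progress >= 100:
                self.state = "success"

    def wait_for_task(task, interval=0.5):
        # Block until the task finishes, logging progress like _poll_task does.
        start = time.time()
        while True:
            task.refresh()
            print("Task progress is %d%%" % task.progress)
            if task.state == "success":
                return time.time() - start  # surfaced as duration_secs in the log
            if task.state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    if __name__ == "__main__":
        print("completed successfully in %.3fs" % wait_for_task(FakeTask(steps=4)))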
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1614.257710] env[62820]: DEBUG nova.compute.manager [-] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1614.257807] env[62820]: DEBUG nova.network.neutron [-] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1614.320582] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.407046] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.407296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.411812] env[62820]: DEBUG nova.compute.manager [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1614.411812] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1614.412432] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fc4ca4-8f60-4c8b-a49d-c62e0a24547f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.421545] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1614.421765] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19a7705d-1e97-4541-9c53-85e355e50d18 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.429963] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1614.429963] env[62820]: value = "task-1695899" [ 1614.429963] env[62820]: _type = "Task" [ 1614.429963] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.442312] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.584907] env[62820]: DEBUG nova.compute.manager [req-000fe0ff-8692-46cd-ad2b-fa4993743058 req-71e1dec4-15fa-4966-b2ad-e9ee92735bd3 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Received event network-vif-deleted-2c04e03b-ab62-4610-b33b-f1d00be3b4be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1614.584907] env[62820]: INFO nova.compute.manager [req-000fe0ff-8692-46cd-ad2b-fa4993743058 req-71e1dec4-15fa-4966-b2ad-e9ee92735bd3 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Neutron deleted interface 2c04e03b-ab62-4610-b33b-f1d00be3b4be; detaching it from the instance and deleting it from the info cache [ 1614.584979] env[62820]: DEBUG nova.network.neutron [req-000fe0ff-8692-46cd-ad2b-fa4993743058 req-71e1dec4-15fa-4966-b2ad-e9ee92735bd3 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.602430] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1614.701614] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Successfully created port: 41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1614.751626] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523b66ee-99a7-71b4-ca3f-63a69d7914d3, 'name': SearchDatastore_Task, 'duration_secs': 0.023935} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.752220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.752220] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c0d14c00-2c93-490c-8b17-91d3b5ee5b3d/c0d14c00-2c93-490c-8b17-91d3b5ee5b3d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1614.752450] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1eccc510-4d6d-4c4e-a17f-fd0b5087452f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.762940] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1614.762940] env[62820]: value = "task-1695900" [ 1614.762940] env[62820]: _type = "Task" [ 1614.762940] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.776712] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695900, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.911490] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1614.940460] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695899, 'name': PowerOffVM_Task, 'duration_secs': 0.285387} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.943622] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1614.943622] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1614.943754] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1edb8787-9cbf-4864-9917-73c6b7877b35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.997680] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4d443b-a299-427d-ba14-5d86ed68196f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.005888] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db0a370-d3a7-4dc4-982b-b5cd79b2fd58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.039232] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da662e4d-0599-4bd7-b29f-98de83f249cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.041907] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1615.042142] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1615.042321] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] ba5b0055-b756-4f80-ba6b-7e8b705d2970 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1615.042565] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-932aa257-1f00-4ca0-9951-b1a773a0de21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.052643] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7717c87d-ad3e-4ac1-b09e-a29579252a61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.056720] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1615.056720] env[62820]: value = "task-1695902" [ 1615.056720] env[62820]: _type = "Task" [ 1615.056720] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.067697] env[62820]: DEBUG nova.network.neutron [-] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.069763] env[62820]: DEBUG nova.compute.provider_tree [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.077402] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.088065] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-202eb589-47a0-4c9e-8f67-ba24faf2ebcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.098727] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83870c5-ca88-486f-b685-05630f4b5d30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.138050] env[62820]: DEBUG nova.compute.manager [req-000fe0ff-8692-46cd-ad2b-fa4993743058 req-71e1dec4-15fa-4966-b2ad-e9ee92735bd3 service nova] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Detach interface failed, port_id=2c04e03b-ab62-4610-b33b-f1d00be3b4be, reason: Instance 492db939-78f4-4642-89dd-a01fa94f41b5 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1615.275053] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695900, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.436321] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.570887] env[62820]: INFO nova.compute.manager [-] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Took 1.31 seconds to deallocate network for instance. [ 1615.571325] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.578306] env[62820]: DEBUG nova.scheduler.client.report [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1615.615444] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Start spawning the instance on the hypervisor. 
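Annotation: the inventory dict reported above is enough to work out how much of each resource class Placement can allocate on this node, using the usual capacity formula (total - reserved) * allocation_ratio (with min_unit/max_unit/step_size constraining individual allocations). A short sketch with the exact numbers from the log:

    # Sketch: usable capacity per resource class, from the inventory logged above.
    #     capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%-9s capacity = (%d - %d) * %.1f = %d"
              % (rc, inv['total'], inv['reserved'],
                 inv['allocation_ratio'], capacity))
    # VCPU      capacity = (48 - 0) * 4.0 = 192
    # MEMORY_MB capacity = (196590 - 512) * 1.0 = 196078
    # DISK_GB   capacity = (400 - 0) * 1.0 = 400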
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1615.643275] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1615.643544] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1615.643706] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1615.643888] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1615.644047] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1615.644206] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1615.644410] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1615.644573] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1615.644738] env[62820]: DEBUG 
nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1615.644941] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1615.645090] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1615.646282] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc18678-bf50-4dc6-92a6-f82c35578a00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.654581] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef435a1f-1e1a-47d2-9a23-ea05a3fb9053 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.774566] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695900, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.997649} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.774868] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c0d14c00-2c93-490c-8b17-91d3b5ee5b3d/c0d14c00-2c93-490c-8b17-91d3b5ee5b3d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1615.775129] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1615.775359] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a3bf199-6f12-4b15-9290-2713bc00d753 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.781843] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1615.781843] env[62820]: value = "task-1695903" [ 1615.781843] env[62820]: _type = "Task" [ 1615.781843] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.790691] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.068231] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.078445] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.084359] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.084855] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1616.087933] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.141s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.088212] env[62820]: DEBUG nova.objects.instance [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lazy-loading 'resources' on Instance uuid 2587a273-0115-483a-ba5e-994c87bbc4d0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1616.177217] env[62820]: DEBUG nova.compute.manager [req-3453a8da-505e-44b7-bf94-9de7011e5724 req-41993fd5-afa2-48ec-9516-7dadb39413e4 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Received event network-vif-plugged-41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1616.177217] env[62820]: DEBUG oslo_concurrency.lockutils [req-3453a8da-505e-44b7-bf94-9de7011e5724 req-41993fd5-afa2-48ec-9516-7dadb39413e4 service nova] Acquiring lock "361b7da3-0e8c-4291-aba0-8b6116b8032f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.177217] env[62820]: DEBUG oslo_concurrency.lockutils [req-3453a8da-505e-44b7-bf94-9de7011e5724 req-41993fd5-afa2-48ec-9516-7dadb39413e4 service nova] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.177527] env[62820]: DEBUG oslo_concurrency.lockutils [req-3453a8da-505e-44b7-bf94-9de7011e5724 req-41993fd5-afa2-48ec-9516-7dadb39413e4 service nova] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.177527] env[62820]: DEBUG nova.compute.manager [req-3453a8da-505e-44b7-bf94-9de7011e5724 req-41993fd5-afa2-48ec-9516-7dadb39413e4 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] No waiting events found dispatching network-vif-plugged-41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1616.177703] env[62820]: WARNING nova.compute.manager [req-3453a8da-505e-44b7-bf94-9de7011e5724 req-41993fd5-afa2-48ec-9516-7dadb39413e4 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Received unexpected event network-vif-plugged-41144e4b-bdb5-419e-902a-b56903f292fc for instance with vm_state building and task_state spawning. 
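Annotation: the WARNING above is the benign race between Neutron and the spawn path: the network-vif-plugged notification arrived before the driver had registered a waiter for it, so pop_instance_event found nothing to dispatch. A simplified sketch of that register-then-dispatch pattern follows; threading.Event stands in for Nova's per-instance event objects and the class and method names are illustrative, not the ComputeManager API.

    import threading

    class InstanceEvents:
        # Simplified stand-in for the per-instance event table guarded by the
        # "<uuid>-events" lock seen in the log.
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    events = InstanceEvents()
    uuid = "361b7da3-0e8c-4291-aba0-8b6116b8032f"
    name = "network-vif-plugged-41144e4b-bdb5-419e-902a-b56903f292fc"

    # Neutron's notification races ahead of the spawn path: nothing is waiting
    # yet, which is exactly the "unexpected event" warning in the log.
    if events.pop_event(uuid, name) is None:
        print("WARNING: received unexpected event %s" % name)

    # In the non-racy case the spawn path registers first, and the
    # notification wakes the waiter instead of producing the warning.
    waiter = events.prepare_for_event(uuid, name)
    pending = events.pop_event(uuid, name)
    if pending is not None:
        pending.set()
    print("dispatched:", waiter.is_set())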
[ 1616.277309] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Successfully updated port: 41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1616.292315] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076642} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.292583] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1616.293414] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb0895-878f-43b9-a71c-6d40d7e3de32 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.316428] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] c0d14c00-2c93-490c-8b17-91d3b5ee5b3d/c0d14c00-2c93-490c-8b17-91d3b5ee5b3d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1616.317011] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad4b7bb6-ecfc-4e8e-9240-3880ad15941b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.337614] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1616.337614] env[62820]: value = "task-1695904" [ 1616.337614] env[62820]: _type = "Task" [ 1616.337614] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.345598] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695904, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.570575] env[62820]: DEBUG oslo_vmware.api [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.076626} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.571035] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.571642] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1616.571642] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1616.571865] env[62820]: INFO nova.compute.manager [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Took 2.16 seconds to destroy the instance on the hypervisor. [ 1616.572131] env[62820]: DEBUG oslo.service.loopingcall [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.572331] env[62820]: DEBUG nova.compute.manager [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1616.572428] env[62820]: DEBUG nova.network.neutron [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1616.591557] env[62820]: DEBUG nova.compute.utils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1616.595730] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1616.595912] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1616.637409] env[62820]: DEBUG nova.policy [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e757d7fd8474b04903db4fb76781717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c944a7dcf084460f9fb13731534ed788', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1616.780022] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "refresh_cache-361b7da3-0e8c-4291-aba0-8b6116b8032f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.780179] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "refresh_cache-361b7da3-0e8c-4291-aba0-8b6116b8032f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.780335] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1616.854221] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695904, 'name': ReconfigVM_Task, 'duration_secs': 0.405768} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.855106] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Reconfigured VM instance instance-00000049 to attach disk [datastore1] c0d14c00-2c93-490c-8b17-91d3b5ee5b3d/c0d14c00-2c93-490c-8b17-91d3b5ee5b3d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1616.855106] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04ae1c1e-0ffc-4fc6-9988-b3afd8fdee5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.864301] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1616.864301] env[62820]: value = "task-1695905" [ 1616.864301] env[62820]: _type = "Task" [ 1616.864301] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.874889] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695905, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.947211] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Successfully created port: 1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1616.984509] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3086668-7212-4446-b108-66f26d634912 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.993069] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ff85f7-d3ae-40de-a383-a2c25d8417b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.032014] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b152175a-c8a5-41ac-b43a-7f7bbe1b0f94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.038189] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f229f03-1bdd-44c6-b88a-21f8bf185418 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.052352] env[62820]: DEBUG nova.compute.provider_tree [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.096623] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1617.311210] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1617.362525] env[62820]: DEBUG nova.network.neutron [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.374720] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695905, 'name': Rename_Task, 'duration_secs': 0.138143} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.378101] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1617.378733] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c25f731-63ff-4afc-a896-0150bbc6ca50 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.386336] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1617.386336] env[62820]: value = "task-1695906" [ 1617.386336] env[62820]: _type = "Task" [ 1617.386336] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.396099] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695906, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.446722] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Updating instance_info_cache with network_info: [{"id": "41144e4b-bdb5-419e-902a-b56903f292fc", "address": "fa:16:3e:81:93:6a", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41144e4b-bd", "ovs_interfaceid": "41144e4b-bdb5-419e-902a-b56903f292fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.555703] env[62820]: DEBUG nova.scheduler.client.report [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1617.865334] env[62820]: INFO nova.compute.manager [-] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Took 1.29 seconds to deallocate network for instance. [ 1617.897997] env[62820]: DEBUG oslo_vmware.api [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695906, 'name': PowerOnVM_Task, 'duration_secs': 0.434782} completed successfully. 
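Annotation: the network_info blob recorded above carries everything the driver later needs to build the VIF (compare the "Instance VIF info" entry further down): the Neutron port id, the MAC address, the OVS bridge, the NSX logical-switch id and the fixed IP. A short sketch pulling those fields out of the same structure; the dict literal is abridged from the log entry.

    # Abridged copy of the network_info entry from the log, reduced to the
    # fields used below.
    network_info = [{
        "id": "41144e4b-bdb5-419e-902a-b56903f292fc",
        "address": "fa:16:3e:81:93:6a",
        "network": {
            "bridge": "br-int",
            "subnets": [{"ips": [{"address": "192.168.128.8", "type": "fixed"}]}],
        },
        "details": {"nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706"},
        "devname": "tap41144e4b-bd",
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print("port %s mac %s bridge %s switch %s ips %s"
              % (vif["id"], vif["address"], vif["network"]["bridge"],
                 vif["details"]["nsx-logical-switch-id"], fixed_ips))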
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.899382] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1617.899624] env[62820]: INFO nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Took 7.67 seconds to spawn the instance on the hypervisor. [ 1617.899936] env[62820]: DEBUG nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1617.900972] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189d968c-a1ed-487d-94a5-2115fdfde2cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.949070] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "refresh_cache-361b7da3-0e8c-4291-aba0-8b6116b8032f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.950056] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Instance network_info: |[{"id": "41144e4b-bdb5-419e-902a-b56903f292fc", "address": "fa:16:3e:81:93:6a", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41144e4b-bd", "ovs_interfaceid": "41144e4b-bdb5-419e-902a-b56903f292fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1617.950056] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 
361b7da3-0e8c-4291-aba0-8b6116b8032f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:93:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41144e4b-bdb5-419e-902a-b56903f292fc', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1617.957326] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Creating folder: Project (c944a7dcf084460f9fb13731534ed788). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1617.958029] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76a651b5-2d93-44f9-8ca1-c56bb057613f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.972047] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Created folder: Project (c944a7dcf084460f9fb13731534ed788) in parent group-v353379. [ 1617.972260] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Creating folder: Instances. Parent ref: group-v353596. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1617.972577] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8cec959-7a73-4d32-b454-7948b7a6f4aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.983509] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Created folder: Instances in parent group-v353596. [ 1617.984198] env[62820]: DEBUG oslo.service.loopingcall [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.984486] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1617.984755] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e3257a9-dfcb-47c6-847c-399c0a4651fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.004995] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1618.004995] env[62820]: value = "task-1695909" [ 1618.004995] env[62820]: _type = "Task" [ 1618.004995] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.013022] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695909, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.061584] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.974s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.064677] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.098s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.066355] env[62820]: INFO nova.compute.claims [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1618.106445] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1618.139662] env[62820]: INFO nova.scheduler.client.report [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleted allocations for instance 2587a273-0115-483a-ba5e-994c87bbc4d0 [ 1618.172257] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1618.172521] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1618.172765] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1618.173743] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1618.173743] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1618.173743] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1618.173743] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1618.173743] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1618.173997] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1618.173997] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1618.174173] env[62820]: DEBUG nova.virt.hardware [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1618.175075] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5d8232-25df-4671-a4ac-8b9cad281b23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.185582] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccc79a6-2ef6-4429-b5e9-6ba54b658b82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.226906] env[62820]: DEBUG nova.compute.manager [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Received event 
network-changed-41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1618.227109] env[62820]: DEBUG nova.compute.manager [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Refreshing instance network info cache due to event network-changed-41144e4b-bdb5-419e-902a-b56903f292fc. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1618.227346] env[62820]: DEBUG oslo_concurrency.lockutils [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] Acquiring lock "refresh_cache-361b7da3-0e8c-4291-aba0-8b6116b8032f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.227592] env[62820]: DEBUG oslo_concurrency.lockutils [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] Acquired lock "refresh_cache-361b7da3-0e8c-4291-aba0-8b6116b8032f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.227748] env[62820]: DEBUG nova.network.neutron [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Refreshing network info cache for port 41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1618.371704] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.422063] env[62820]: INFO nova.compute.manager [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Took 27.57 seconds to build instance. 
[ 1618.424718] env[62820]: DEBUG nova.compute.manager [req-095f60ae-1890-411c-8d6b-f5f05a3a941c req-a1b0df1f-6d90-468c-b5ba-b05bf089f381 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Received event network-vif-plugged-1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1618.425029] env[62820]: DEBUG oslo_concurrency.lockutils [req-095f60ae-1890-411c-8d6b-f5f05a3a941c req-a1b0df1f-6d90-468c-b5ba-b05bf089f381 service nova] Acquiring lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.425162] env[62820]: DEBUG oslo_concurrency.lockutils [req-095f60ae-1890-411c-8d6b-f5f05a3a941c req-a1b0df1f-6d90-468c-b5ba-b05bf089f381 service nova] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.425307] env[62820]: DEBUG oslo_concurrency.lockutils [req-095f60ae-1890-411c-8d6b-f5f05a3a941c req-a1b0df1f-6d90-468c-b5ba-b05bf089f381 service nova] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.425520] env[62820]: DEBUG nova.compute.manager [req-095f60ae-1890-411c-8d6b-f5f05a3a941c req-a1b0df1f-6d90-468c-b5ba-b05bf089f381 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] No waiting events found dispatching network-vif-plugged-1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1618.425633] env[62820]: WARNING nova.compute.manager [req-095f60ae-1890-411c-8d6b-f5f05a3a941c req-a1b0df1f-6d90-468c-b5ba-b05bf089f381 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Received unexpected event network-vif-plugged-1d5518da-f98f-4610-94c2-bf2a0a4f8499 for instance with vm_state building and task_state spawning. [ 1618.518941] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695909, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.519861] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Successfully updated port: 1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.647315] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3b4a5249-16aa-4720-87bd-999a18086a37 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "2587a273-0115-483a-ba5e-994c87bbc4d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.051s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.926802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ad748f-0b9f-4752-828b-7c82cb0fa3f3 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.084s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.958030] env[62820]: DEBUG nova.network.neutron [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Updated VIF entry in instance network info cache for port 41144e4b-bdb5-419e-902a-b56903f292fc. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.958416] env[62820]: DEBUG nova.network.neutron [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Updating instance_info_cache with network_info: [{"id": "41144e4b-bdb5-419e-902a-b56903f292fc", "address": "fa:16:3e:81:93:6a", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41144e4b-bd", "ovs_interfaceid": "41144e4b-bdb5-419e-902a-b56903f292fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.017368] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695909, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.025100] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "refresh_cache-9a1b9c99-57ef-4c16-97ca-739917c6c3d7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.025304] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "refresh_cache-9a1b9c99-57ef-4c16-97ca-739917c6c3d7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.025514] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1619.417270] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddb1eea-11b5-4aa4-8803-85dcbf64156c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.426330] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0469a5bd-52c0-472d-9a3f-d587ade9fff2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.460652] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e53d53-7745-405b-bd76-2b743ab1ef2b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.463483] env[62820]: DEBUG oslo_concurrency.lockutils [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] Releasing lock "refresh_cache-361b7da3-0e8c-4291-aba0-8b6116b8032f" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.463724] env[62820]: DEBUG nova.compute.manager [req-694cbbfe-3711-4555-b808-fd9b5d7f6f35 req-b64202b7-5e32-46db-94dd-a69c8275c250 service nova] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Received event network-vif-deleted-89fa6298-ff56-4900-8160-84554ea1e23c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1619.469636] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a939163-19dc-4022-88ba-01ad4dc7e224 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.482881] env[62820]: DEBUG nova.compute.provider_tree [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.516730] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695909, 'name': CreateVM_Task, 'duration_secs': 1.458782} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.516906] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1619.517677] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.517838] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.518303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1619.518535] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbd70ef-9321-4087-9f8b-380cb7a6093b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.523429] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1619.523429] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bf3219-f229-1f9e-9c6d-8b2aa59556ff" [ 1619.523429] env[62820]: _type = "Task" [ 1619.523429] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.531962] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bf3219-f229-1f9e-9c6d-8b2aa59556ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.559470] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1619.688859] env[62820]: DEBUG nova.network.neutron [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Updating instance_info_cache with network_info: [{"id": "1d5518da-f98f-4610-94c2-bf2a0a4f8499", "address": "fa:16:3e:55:ad:c6", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d5518da-f9", "ovs_interfaceid": "1d5518da-f98f-4610-94c2-bf2a0a4f8499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.768418] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.768736] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.768972] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.769219] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.769436] env[62820]: DEBUG 
oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.772053] env[62820]: INFO nova.compute.manager [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Terminating instance [ 1619.794264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "4ae63ae5-0306-4540-be88-6e7d909c38a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.794504] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "4ae63ae5-0306-4540-be88-6e7d909c38a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.794711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "4ae63ae5-0306-4540-be88-6e7d909c38a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.794900] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "4ae63ae5-0306-4540-be88-6e7d909c38a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.795087] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "4ae63ae5-0306-4540-be88-6e7d909c38a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.797010] env[62820]: INFO nova.compute.manager [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Terminating instance [ 1619.985818] env[62820]: DEBUG nova.scheduler.client.report [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1620.034554] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52bf3219-f229-1f9e-9c6d-8b2aa59556ff, 'name': SearchDatastore_Task, 'duration_secs': 0.018598} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.034852] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.035124] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.035495] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.035684] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.035916] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1620.036202] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a7c9faa-5329-49a3-94a7-b38372cc147e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.046391] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1620.046625] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1620.047352] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1f03c91-44f5-4985-ae35-4b95503b3998 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.053324] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1620.053324] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a920cc-74b5-0389-c98d-90229016dfd4" [ 1620.053324] env[62820]: _type = "Task" [ 1620.053324] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.061065] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a920cc-74b5-0389-c98d-90229016dfd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.192432] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "refresh_cache-9a1b9c99-57ef-4c16-97ca-739917c6c3d7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.192789] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Instance network_info: |[{"id": "1d5518da-f98f-4610-94c2-bf2a0a4f8499", "address": "fa:16:3e:55:ad:c6", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d5518da-f9", "ovs_interfaceid": "1d5518da-f98f-4610-94c2-bf2a0a4f8499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1620.193233] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:ad:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d5518da-f98f-4610-94c2-bf2a0a4f8499', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1620.200646] env[62820]: DEBUG oslo.service.loopingcall [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.200860] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1620.201090] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf17a56b-9d3e-4711-b49d-34bbee506e7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.221746] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1620.221746] env[62820]: value = "task-1695910" [ 1620.221746] env[62820]: _type = "Task" [ 1620.221746] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.232377] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695910, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.276068] env[62820]: DEBUG nova.compute.manager [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1620.276310] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1620.277239] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8968eeb9-8f67-4435-a099-7d241029b460 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.285636] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1620.285964] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b09ae80-a28a-49d1-992c-c927b6583865 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.293655] env[62820]: DEBUG oslo_vmware.api [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1620.293655] env[62820]: value = "task-1695911" [ 1620.293655] env[62820]: _type = "Task" [ 1620.293655] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.302624] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "refresh_cache-4ae63ae5-0306-4540-be88-6e7d909c38a3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.302863] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquired lock "refresh_cache-4ae63ae5-0306-4540-be88-6e7d909c38a3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.302948] env[62820]: DEBUG nova.network.neutron [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1620.304151] env[62820]: DEBUG oslo_vmware.api [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.487241] env[62820]: DEBUG nova.compute.manager [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Received event network-changed-1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1620.487575] env[62820]: DEBUG nova.compute.manager [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Refreshing instance network info cache due to event network-changed-1d5518da-f98f-4610-94c2-bf2a0a4f8499. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1620.487862] env[62820]: DEBUG oslo_concurrency.lockutils [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] Acquiring lock "refresh_cache-9a1b9c99-57ef-4c16-97ca-739917c6c3d7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.488075] env[62820]: DEBUG oslo_concurrency.lockutils [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] Acquired lock "refresh_cache-9a1b9c99-57ef-4c16-97ca-739917c6c3d7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.488256] env[62820]: DEBUG nova.network.neutron [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Refreshing network info cache for port 1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1620.490877] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.491420] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1620.494380] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.022s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.494643] env[62820]: DEBUG nova.objects.instance [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lazy-loading 'resources' on Instance uuid 9114a81d-86a9-493b-9c07-c4724a0588ac {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1620.565487] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a920cc-74b5-0389-c98d-90229016dfd4, 'name': SearchDatastore_Task, 'duration_secs': 0.019311} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.566412] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c49aa628-b329-4eef-9900-7466d9a82777 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.572160] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1620.572160] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521cde29-1ce9-58a6-2404-f2e2c48255e5" [ 1620.572160] env[62820]: _type = "Task" [ 1620.572160] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.580536] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521cde29-1ce9-58a6-2404-f2e2c48255e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.731878] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695910, 'name': CreateVM_Task, 'duration_secs': 0.438064} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.732074] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1620.732736] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.732901] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.733251] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1620.733502] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-972fecf6-95d0-4f5d-bf14-522f894df5df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.738874] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1620.738874] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528c8e99-11ec-69ee-873c-8d89fbd575d8" [ 1620.738874] env[62820]: _type = "Task" [ 1620.738874] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.746478] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528c8e99-11ec-69ee-873c-8d89fbd575d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.804347] env[62820]: DEBUG oslo_vmware.api [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695911, 'name': PowerOffVM_Task, 'duration_secs': 0.222316} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.804644] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1620.804816] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1620.806925] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ed6832b-8309-41aa-af6e-a2b2ba1d1421 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.823782] env[62820]: DEBUG nova.network.neutron [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1620.880478] env[62820]: DEBUG nova.network.neutron [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.887242] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1620.887543] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1620.887737] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Deleting the datastore file [datastore1] c0d14c00-2c93-490c-8b17-91d3b5ee5b3d {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1620.888084] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-185bab19-1c61-421c-9589-30eae6fed6da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.896745] env[62820]: DEBUG oslo_vmware.api [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for the task: (returnval){ [ 1620.896745] env[62820]: value = "task-1695913" [ 1620.896745] env[62820]: _type = 
"Task" [ 1620.896745] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.905601] env[62820]: DEBUG oslo_vmware.api [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695913, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.000531] env[62820]: DEBUG nova.compute.utils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1621.003626] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1621.003626] env[62820]: DEBUG nova.network.neutron [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1621.044121] env[62820]: DEBUG nova.policy [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b328ffc83d344899fcbbb6e9ade1698', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bdc42fe98fb43d7bd92e2dd789aff93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1621.087773] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521cde29-1ce9-58a6-2404-f2e2c48255e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010971} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.087773] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.087773] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 361b7da3-0e8c-4291-aba0-8b6116b8032f/361b7da3-0e8c-4291-aba0-8b6116b8032f.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1621.087773] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14cb1c0f-d0dc-4206-8749-764f4e9009e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.098669] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1621.098669] env[62820]: value = "task-1695914" [ 1621.098669] env[62820]: _type = "Task" [ 1621.098669] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.107220] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.226179] env[62820]: DEBUG nova.network.neutron [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Updated VIF entry in instance network info cache for port 1d5518da-f98f-4610-94c2-bf2a0a4f8499. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1621.226179] env[62820]: DEBUG nova.network.neutron [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Updating instance_info_cache with network_info: [{"id": "1d5518da-f98f-4610-94c2-bf2a0a4f8499", "address": "fa:16:3e:55:ad:c6", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d5518da-f9", "ovs_interfaceid": "1d5518da-f98f-4610-94c2-bf2a0a4f8499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.254847] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528c8e99-11ec-69ee-873c-8d89fbd575d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.255423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.255842] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1621.256253] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.256538] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.256881] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.257252] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bbc0a9d-5c0e-4e55-a262-0e3efebd57ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.274198] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.274402] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1621.275177] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a4c786c-012c-4109-aa4d-076a024419a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.283885] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1621.283885] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c60a92-eb33-e16d-e060-95ea85d5ca1f" [ 1621.283885] env[62820]: _type = "Task" [ 1621.283885] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.292893] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c60a92-eb33-e16d-e060-95ea85d5ca1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.340199] env[62820]: DEBUG nova.network.neutron [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Successfully created port: 4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.379597] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce20ee58-a574-4158-b14f-5d404a057212 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.383178] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Releasing lock "refresh_cache-4ae63ae5-0306-4540-be88-6e7d909c38a3" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.383633] env[62820]: DEBUG nova.compute.manager [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1621.383846] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1621.384661] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942afd53-482e-4ad5-94a1-0ae6f1a17302 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.393277] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1195f46b-4c13-42be-b1a4-121aa570e646 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.399268] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1621.402944] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1569ec4a-bed6-46fa-a209-11b0f073a45f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.430723] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283cbf8d-501a-4431-bc6a-1d3e52251a34 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.436353] env[62820]: DEBUG oslo_vmware.api [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Task: {'id': task-1695913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173822} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.438177] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1621.438389] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1621.438600] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1621.439725] env[62820]: INFO nova.compute.manager [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1621.439725] env[62820]: DEBUG oslo.service.loopingcall [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1621.439725] env[62820]: DEBUG oslo_vmware.api [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1621.439725] env[62820]: value = "task-1695915" [ 1621.439725] env[62820]: _type = "Task" [ 1621.439725] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.439725] env[62820]: DEBUG nova.compute.manager [-] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1621.439725] env[62820]: DEBUG nova.network.neutron [-] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1621.450254] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6226d477-9761-40ce-b7af-290adfaba444 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.458239] env[62820]: DEBUG oslo_vmware.api [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695915, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.470713] env[62820]: DEBUG nova.compute.provider_tree [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.506513] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1621.610633] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513121} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.610994] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 361b7da3-0e8c-4291-aba0-8b6116b8032f/361b7da3-0e8c-4291-aba0-8b6116b8032f.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1621.611235] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1621.611772] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-809561dd-afc9-4215-9aaf-752676c90b84 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.627041] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1621.627041] env[62820]: value = "task-1695916" [ 1621.627041] env[62820]: _type = "Task" [ 1621.627041] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.642326] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695916, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.729404] env[62820]: DEBUG oslo_concurrency.lockutils [req-40e82211-d822-427c-b8a4-35e5a3b153df req-ae273c1c-3bb3-4971-a17f-ac485466f5ac service nova] Releasing lock "refresh_cache-9a1b9c99-57ef-4c16-97ca-739917c6c3d7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.796657] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c60a92-eb33-e16d-e060-95ea85d5ca1f, 'name': SearchDatastore_Task, 'duration_secs': 0.084078} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.797524] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1184e6ed-c3d7-4709-9359-9fc85534cd0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.804571] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1621.804571] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c29ad8-0ce1-ecd5-108c-f89b3430d9cb" [ 1621.804571] env[62820]: _type = "Task" [ 1621.804571] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.813411] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c29ad8-0ce1-ecd5-108c-f89b3430d9cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.952528] env[62820]: DEBUG oslo_vmware.api [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695915, 'name': PowerOffVM_Task, 'duration_secs': 0.147941} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.952734] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1621.952908] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1621.953185] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c95f719c-b31e-4c80-af76-a4c9b26f04df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.974079] env[62820]: DEBUG nova.scheduler.client.report [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1621.983609] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1621.983767] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1621.983979] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleting the datastore file [datastore1] 4ae63ae5-0306-4540-be88-6e7d909c38a3 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1621.984263] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-286e0572-0c26-464c-8751-2e72562db360 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.992513] env[62820]: DEBUG oslo_vmware.api [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for the task: (returnval){ [ 1621.992513] env[62820]: value = "task-1695918" [ 1621.992513] env[62820]: _type = "Task" [ 1621.992513] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.002523] env[62820]: DEBUG oslo_vmware.api [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.137868] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072339} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.138334] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1622.139212] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09d71df-a819-40ec-a18d-03c0aa972575 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.163985] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 361b7da3-0e8c-4291-aba0-8b6116b8032f/361b7da3-0e8c-4291-aba0-8b6116b8032f.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1622.164312] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0772c5e7-5cb7-4466-86a6-609966a03f1d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.184637] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1622.184637] env[62820]: value = "task-1695919" [ 1622.184637] env[62820]: _type = "Task" [ 1622.184637] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.193911] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695919, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.231664] env[62820]: DEBUG nova.network.neutron [-] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.318087] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c29ad8-0ce1-ecd5-108c-f89b3430d9cb, 'name': SearchDatastore_Task, 'duration_secs': 0.017505} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.318191] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.319035] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9a1b9c99-57ef-4c16-97ca-739917c6c3d7/9a1b9c99-57ef-4c16-97ca-739917c6c3d7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1622.319035] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c555defb-6d5c-49bb-86aa-5d326b17cc7d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.326647] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1622.326647] env[62820]: value = "task-1695920" [ 1622.326647] env[62820]: _type = "Task" [ 1622.326647] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.335009] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695920, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.479904] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.482246] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.449s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.482484] env[62820]: DEBUG nova.objects.instance [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lazy-loading 'resources' on Instance uuid 207efed9-20ea-4b9e-bca2-45521b41de6a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1622.503236] env[62820]: DEBUG oslo_vmware.api [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Task: {'id': task-1695918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093686} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.504354] env[62820]: INFO nova.scheduler.client.report [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted allocations for instance 9114a81d-86a9-493b-9c07-c4724a0588ac [ 1622.509137] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1622.509355] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1622.509394] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1622.509536] env[62820]: INFO nova.compute.manager [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1622.509809] env[62820]: DEBUG oslo.service.loopingcall [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.510336] env[62820]: DEBUG nova.compute.manager [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1622.510427] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1622.514398] env[62820]: DEBUG nova.compute.manager [req-384cbd68-c7d5-4e82-bcc8-5af2c2d6c03c req-4a0f99b9-ec5e-4caf-8cf4-008694ec898e service nova] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Received event network-vif-deleted-22b45c92-acc9-495a-97db-769521fc7c3b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1622.516055] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1622.531653] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1622.544897] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1622.545188] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1622.545407] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.545660] env[62820]: DEBUG nova.virt.hardware [None 
req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1622.545868] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.546049] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1622.546326] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1622.546534] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1622.546762] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1622.546980] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1622.547236] env[62820]: DEBUG nova.virt.hardware [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1622.548540] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c732e4-10a2-4de1-b87f-aae47a1484f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.558868] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da53c534-b816-4d22-a471-9d289741c64d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.695130] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695919, 'name': ReconfigVM_Task, 'duration_secs': 0.499073} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.695439] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 361b7da3-0e8c-4291-aba0-8b6116b8032f/361b7da3-0e8c-4291-aba0-8b6116b8032f.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1622.696675] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ed0b52c-9d1d-42b4-9111-a0318d2a5d21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.704050] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1622.704050] env[62820]: value = "task-1695921" [ 1622.704050] env[62820]: _type = "Task" [ 1622.704050] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.718367] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695921, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.735220] env[62820]: INFO nova.compute.manager [-] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Took 1.30 seconds to deallocate network for instance. [ 1622.837915] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4899} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.838260] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9a1b9c99-57ef-4c16-97ca-739917c6c3d7/9a1b9c99-57ef-4c16-97ca-739917c6c3d7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1622.838518] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1622.838778] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65185edc-3443-416a-89c5-04502a7c332f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.845500] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1622.845500] env[62820]: value = "task-1695922" [ 1622.845500] env[62820]: _type = "Task" [ 1622.845500] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.856021] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695922, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.913888] env[62820]: DEBUG nova.compute.manager [req-5218eb30-a088-4360-b957-38f11b01495f req-2520b512-8484-4926-99db-134cfa718d5d service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Received event network-vif-plugged-4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1622.914155] env[62820]: DEBUG oslo_concurrency.lockutils [req-5218eb30-a088-4360-b957-38f11b01495f req-2520b512-8484-4926-99db-134cfa718d5d service nova] Acquiring lock "0d519bc8-3cc1-429e-b41b-ed0035622562-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.914590] env[62820]: DEBUG oslo_concurrency.lockutils [req-5218eb30-a088-4360-b957-38f11b01495f req-2520b512-8484-4926-99db-134cfa718d5d service nova] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.914590] env[62820]: DEBUG oslo_concurrency.lockutils [req-5218eb30-a088-4360-b957-38f11b01495f req-2520b512-8484-4926-99db-134cfa718d5d service nova] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.914891] env[62820]: DEBUG nova.compute.manager [req-5218eb30-a088-4360-b957-38f11b01495f req-2520b512-8484-4926-99db-134cfa718d5d service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] No waiting events found dispatching network-vif-plugged-4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1622.915073] env[62820]: WARNING nova.compute.manager [req-5218eb30-a088-4360-b957-38f11b01495f req-2520b512-8484-4926-99db-134cfa718d5d service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Received unexpected event network-vif-plugged-4cf9f650-b061-4b02-bf65-8379061b1938 for instance with vm_state building and task_state spawning. 
[ 1623.013174] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa81dfb5-12fb-448c-bfbd-87b2a4868704 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "9114a81d-86a9-493b-9c07-c4724a0588ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.009s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.024377] env[62820]: DEBUG nova.network.neutron [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Successfully updated port: 4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1623.034461] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.215557] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695921, 'name': Rename_Task, 'duration_secs': 0.235632} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.215831] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1623.216107] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cad33676-07e3-412c-b101-98ece353a59a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.224444] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1623.224444] env[62820]: value = "task-1695923" [ 1623.224444] env[62820]: _type = "Task" [ 1623.224444] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.232539] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695923, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.242975] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.313757] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0f789b-f08d-473c-a6fa-dad6189acaca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.321843] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a672cf-cd7b-4eec-aec8-03abe89e8425 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.355584] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc02fa7-d5ef-4a4e-9c28-20bff98a6220 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.365713] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695922, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070921} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.366931] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391f3141-841e-43e0-a5ef-ccba51fdd8b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.370666] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1623.371309] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38f8999-6fd1-43f7-9197-8653d1885796 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.385476] env[62820]: DEBUG nova.compute.provider_tree [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1623.403620] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 
tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 9a1b9c99-57ef-4c16-97ca-739917c6c3d7/9a1b9c99-57ef-4c16-97ca-739917c6c3d7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1623.404652] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54d45b29-fc03-468c-87b2-396794d81a0e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.423869] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1623.423869] env[62820]: value = "task-1695924" [ 1623.423869] env[62820]: _type = "Task" [ 1623.423869] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.432627] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695924, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.528261] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "refresh_cache-0d519bc8-3cc1-429e-b41b-ed0035622562" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.528567] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "refresh_cache-0d519bc8-3cc1-429e-b41b-ed0035622562" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.528779] env[62820]: DEBUG nova.network.neutron [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1623.537044] env[62820]: INFO nova.compute.manager [-] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Took 1.03 seconds to deallocate network for instance. [ 1623.736637] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695923, 'name': PowerOnVM_Task, 'duration_secs': 0.475351} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.737019] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1623.737320] env[62820]: INFO nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Took 8.12 seconds to spawn the instance on the hypervisor. [ 1623.737585] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1623.738546] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a71d7b-aaf8-4e88-9ad0-45b429a42371 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.922697] env[62820]: ERROR nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] [req-a152be93-a579-48b6-bad0-c43251bf4451] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a152be93-a579-48b6-bad0-c43251bf4451"}]} [ 1623.934823] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695924, 'name': ReconfigVM_Task, 'duration_secs': 0.290365} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.935606] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 9a1b9c99-57ef-4c16-97ca-739917c6c3d7/9a1b9c99-57ef-4c16-97ca-739917c6c3d7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1623.935905] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d86b40d-79a0-4213-8fd9-7ef708453112 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.940198] env[62820]: DEBUG nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1623.944136] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1623.944136] env[62820]: value = "task-1695925" [ 1623.944136] env[62820]: _type = "Task" [ 1623.944136] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.955086] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695925, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.956940] env[62820]: DEBUG nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1623.956940] env[62820]: DEBUG nova.compute.provider_tree [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1623.969955] env[62820]: DEBUG nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1623.991057] env[62820]: DEBUG nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1624.043761] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.063012] env[62820]: DEBUG nova.network.neutron [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1624.211792] env[62820]: DEBUG nova.network.neutron [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Updating instance_info_cache with network_info: [{"id": "4cf9f650-b061-4b02-bf65-8379061b1938", "address": "fa:16:3e:99:a9:21", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cf9f650-b0", "ovs_interfaceid": "4cf9f650-b061-4b02-bf65-8379061b1938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.255424] env[62820]: INFO nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Took 22.21 seconds to build instance. 
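[editor's note] The 409 "placement.concurrent_update" error and the "Refreshing inventories / Updating ProviderTree" entries above show Placement's optimistic concurrency control on the resource-provider generation: the inventory PUT carried a stale generation, so the report client re-reads the provider and retries. The sketch below is a minimal, hypothetical illustration of that compare-and-swap loop, not the actual nova.scheduler.client.report code; the endpoint URL, token, and microversion header are placeholder values.

```python
# Hypothetical sketch of the retry-on-generation-conflict pattern visible in
# the log above. PLACEMENT_URL and TOKEN are illustrative, not from this log.
import requests

PLACEMENT_URL = "http://placement.example/resource_providers"
HEADERS = {"X-Auth-Token": "TOKEN", "OpenStack-API-Version": "placement 1.26"}

def put_inventory(rp_uuid, inventories, max_retries=4):
    for _ in range(max_retries):
        # Read the provider to learn its current generation.
        rp = requests.get(f"{PLACEMENT_URL}/{rp_uuid}", headers=HEADERS).json()
        body = {
            "resource_provider_generation": rp["generation"],
            "inventories": inventories,
        }
        resp = requests.put(f"{PLACEMENT_URL}/{rp_uuid}/inventories",
                            json=body, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()           # success: generation was current
        if resp.status_code != 409:
            resp.raise_for_status()      # anything other than a conflict is fatal
        # 409: another writer bumped the generation; loop, re-read, retry.
    raise RuntimeError("gave up after repeated generation conflicts")
```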
[ 1624.288711] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110ec1ed-78f1-4095-81c0-ef55497d5d08 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.296866] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9988e13d-8879-44e4-8d6a-32e1b9419f3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.329975] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b694cf09-4b10-4ce5-8a9b-6760388bcef8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.338781] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05de548-63d1-4698-a066-1b19347f9b8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.352622] env[62820]: DEBUG nova.compute.provider_tree [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.454384] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695925, 'name': Rename_Task, 'duration_secs': 0.152071} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.455038] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1624.455038] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8eedc305-d580-47ea-bfbf-6dd794b059fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.463342] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1624.463342] env[62820]: value = "task-1695926" [ 1624.463342] env[62820]: _type = "Task" [ 1624.463342] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.488042] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695926, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.716937] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "refresh_cache-0d519bc8-3cc1-429e-b41b-ed0035622562" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.717312] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Instance network_info: |[{"id": "4cf9f650-b061-4b02-bf65-8379061b1938", "address": "fa:16:3e:99:a9:21", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cf9f650-b0", "ovs_interfaceid": "4cf9f650-b061-4b02-bf65-8379061b1938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1624.717838] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:a9:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cf9f650-b061-4b02-bf65-8379061b1938', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1624.725305] env[62820]: DEBUG oslo.service.loopingcall [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.726050] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1624.726050] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29d59d6f-49d9-4565-aac4-faa40aaddd79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.746024] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1624.746024] env[62820]: value = "task-1695927" [ 1624.746024] env[62820]: _type = "Task" [ 1624.746024] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.754052] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695927, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.759644] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.726s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.856901] env[62820]: DEBUG nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1624.945641] env[62820]: DEBUG nova.compute.manager [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Received event network-changed-4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1624.945864] env[62820]: DEBUG nova.compute.manager [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Refreshing instance network info cache due to event network-changed-4cf9f650-b061-4b02-bf65-8379061b1938. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1624.946100] env[62820]: DEBUG oslo_concurrency.lockutils [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] Acquiring lock "refresh_cache-0d519bc8-3cc1-429e-b41b-ed0035622562" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.946251] env[62820]: DEBUG oslo_concurrency.lockutils [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] Acquired lock "refresh_cache-0d519bc8-3cc1-429e-b41b-ed0035622562" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.946412] env[62820]: DEBUG nova.network.neutron [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Refreshing network info cache for port 4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1624.976040] env[62820]: DEBUG oslo_vmware.api [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695926, 'name': PowerOnVM_Task, 'duration_secs': 0.48427} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.976040] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1624.976040] env[62820]: INFO nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Took 6.87 seconds to spawn the instance on the hypervisor. [ 1624.976040] env[62820]: DEBUG nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1624.976040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c2caaf-9ce6-47fe-835b-43739968522d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.256423] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695927, 'name': CreateVM_Task, 'duration_secs': 0.380108} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.256596] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1625.257286] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.257475] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.257791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1625.258056] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c1abad5-d727-4b7e-b64f-a3e40eb7297d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.262893] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1625.262893] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524048ed-c179-decd-13c3-c99049087817" [ 1625.262893] env[62820]: _type = "Task" [ 1625.262893] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.271963] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524048ed-c179-decd-13c3-c99049087817, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.362451] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.880s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.364715] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.778s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.364950] env[62820]: DEBUG nova.objects.instance [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'resources' on Instance uuid 4ac8c3b8-e5e5-4a74-a430-a88e856b705e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1625.380013] env[62820]: INFO nova.scheduler.client.report [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Deleted allocations for instance 207efed9-20ea-4b9e-bca2-45521b41de6a [ 1625.494189] env[62820]: INFO nova.compute.manager [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Took 23.41 seconds to build instance. [ 1625.667330] env[62820]: DEBUG nova.network.neutron [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Updated VIF entry in instance network info cache for port 4cf9f650-b061-4b02-bf65-8379061b1938. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1625.667849] env[62820]: DEBUG nova.network.neutron [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Updating instance_info_cache with network_info: [{"id": "4cf9f650-b061-4b02-bf65-8379061b1938", "address": "fa:16:3e:99:a9:21", "network": {"id": "ea4829f0-fe1f-4b60-a6f9-ad4d50f6335a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2095508525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bdc42fe98fb43d7bd92e2dd789aff93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cf9f650-b0", "ovs_interfaceid": "4cf9f650-b061-4b02-bf65-8379061b1938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.774257] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524048ed-c179-decd-13c3-c99049087817, 'name': SearchDatastore_Task, 'duration_secs': 0.059381} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.774574] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.774809] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1625.775072] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.775223] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.775408] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.775664] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18132897-598b-4b85-a919-eb6313a1959e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.784679] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.784931] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1625.785693] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d8a97ca-f890-43ef-9206-b2c9d6ada5cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.791930] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1625.791930] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c619dc-9d84-68ca-be42-7bf46af4f0c9" [ 1625.791930] env[62820]: _type = "Task" [ 1625.791930] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.800640] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c619dc-9d84-68ca-be42-7bf46af4f0c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.869995] env[62820]: DEBUG nova.objects.instance [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'numa_topology' on Instance uuid 4ac8c3b8-e5e5-4a74-a430-a88e856b705e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1625.886013] env[62820]: DEBUG oslo_concurrency.lockutils [None req-94689813-ad46-4fef-8736-2ec0bb0a1290 tempest-ServersAdminNegativeTestJSON-1878610368 tempest-ServersAdminNegativeTestJSON-1878610368-project-member] Lock "207efed9-20ea-4b9e-bca2-45521b41de6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.302s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.996300] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f1cbc12c-c441-473e-b13d-73344cc05814 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.925s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.170410] env[62820]: DEBUG oslo_concurrency.lockutils [req-10c0df4f-c040-4ced-936e-3f2dbd3c1605 req-7f81ba34-78e1-4b26-9387-4f34a81555f5 service nova] Releasing lock "refresh_cache-0d519bc8-3cc1-429e-b41b-ed0035622562" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.303133] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c619dc-9d84-68ca-be42-7bf46af4f0c9, 'name': SearchDatastore_Task, 'duration_secs': 0.010201} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.303984] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d009dddc-4c93-408e-b3f2-2c1991c3ebdf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.310391] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1626.310391] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522dc264-9080-b763-2097-f2c12f2af3ad" [ 1626.310391] env[62820]: _type = "Task" [ 1626.310391] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.320904] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522dc264-9080-b763-2097-f2c12f2af3ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.375173] env[62820]: DEBUG nova.objects.base [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Object Instance<4ac8c3b8-e5e5-4a74-a430-a88e856b705e> lazy-loaded attributes: resources,numa_topology {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1626.684478] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61c3373-34f4-48e4-a1e0-2cc5a1895446 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.694568] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aceafcff-0f02-4dcd-9a7e-b80f1f64af22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.726163] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4a90fb-3d78-4424-9473-926d330ae128 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.734761] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0125a6b6-4ec0-43bf-90fa-cb215ddc9d66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.749305] env[62820]: DEBUG nova.compute.provider_tree [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.822445] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522dc264-9080-b763-2097-f2c12f2af3ad, 'name': SearchDatastore_Task, 'duration_secs': 0.012135} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.823122] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.823396] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0d519bc8-3cc1-429e-b41b-ed0035622562/0d519bc8-3cc1-429e-b41b-ed0035622562.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1626.823672] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db2e9a24-c984-4586-9cfe-960ee7b7de51 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.832080] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1626.832080] env[62820]: value = "task-1695928" [ 1626.832080] env[62820]: _type = "Task" [ 1626.832080] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.841262] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695928, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.876336] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "361b7da3-0e8c-4291-aba0-8b6116b8032f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.876502] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.876839] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "361b7da3-0e8c-4291-aba0-8b6116b8032f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.876915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.877269] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.879627] env[62820]: INFO nova.compute.manager [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Terminating instance [ 1626.955668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.955939] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.956159] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.956344] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.956514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.958967] env[62820]: INFO nova.compute.manager [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Terminating instance [ 1627.252723] env[62820]: DEBUG nova.scheduler.client.report [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1627.345220] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695928, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.386202] env[62820]: DEBUG nova.compute.manager [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1627.386437] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1627.387366] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65de263c-59a2-492a-9568-4c8458ef9ecd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.395476] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.395741] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcb25f93-12ea-4e2d-86b8-0c1e73216063 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.402945] env[62820]: DEBUG oslo_vmware.api [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1627.402945] env[62820]: value = "task-1695929" [ 1627.402945] env[62820]: _type = "Task" [ 1627.402945] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.411980] env[62820]: DEBUG oslo_vmware.api [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695929, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.462933] env[62820]: DEBUG nova.compute.manager [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1627.463200] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1627.464189] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805bf331-23e5-4514-aa39-de7f47452be4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.473595] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.473867] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c46df06f-0529-4766-9bde-98004d138fde {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.483271] env[62820]: DEBUG oslo_vmware.api [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1627.483271] env[62820]: value = "task-1695930" [ 1627.483271] env[62820]: _type = "Task" [ 1627.483271] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.493900] env[62820]: DEBUG oslo_vmware.api [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695930, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.758185] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.393s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.761051] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.838s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.762709] env[62820]: INFO nova.compute.claims [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1627.847682] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.953883} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.848051] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0d519bc8-3cc1-429e-b41b-ed0035622562/0d519bc8-3cc1-429e-b41b-ed0035622562.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1627.848260] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1627.848722] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4f8affe-bc3f-4e72-91f5-b868ec14f524 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.858135] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1627.858135] env[62820]: value = "task-1695931" [ 1627.858135] env[62820]: _type = "Task" [ 1627.858135] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.879384] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695931, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.913083] env[62820]: DEBUG oslo_vmware.api [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695929, 'name': PowerOffVM_Task, 'duration_secs': 0.486192} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.913373] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1627.913547] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1627.913808] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d912946e-2c89-4168-bfde-d2594caed66f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.993403] env[62820]: DEBUG oslo_vmware.api [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695930, 'name': PowerOffVM_Task, 'duration_secs': 0.283305} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.993605] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1627.993842] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1627.995024] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36493b3b-5a85-4483-a767-33fc0507fc5a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.050825] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.050825] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.050825] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleting the datastore file [datastore1] 361b7da3-0e8c-4291-aba0-8b6116b8032f {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.050825] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27b0c2a5-7320-4936-be46-6cffc4c4a2bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.057230] env[62820]: DEBUG oslo_vmware.api [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1628.057230] env[62820]: value = "task-1695934" [ 1628.057230] env[62820]: _type = "Task" [ 1628.057230] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.067196] env[62820]: DEBUG oslo_vmware.api [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695934, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.076304] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.076569] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.076749] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleting the datastore file [datastore1] 9a1b9c99-57ef-4c16-97ca-739917c6c3d7 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.077765] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cd4b9da-9ba3-4d34-993f-cb53f4991ccc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.085593] env[62820]: DEBUG oslo_vmware.api [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1628.085593] env[62820]: value = "task-1695935" [ 1628.085593] env[62820]: _type = "Task" [ 1628.085593] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.094553] env[62820]: DEBUG oslo_vmware.api [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695935, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.279200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3d713de1-8d01-46c1-a495-147b4b7c4c3f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 35.399s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.282456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 16.104s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.282688] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.282897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.283075] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.288123] env[62820]: INFO nova.compute.manager [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Terminating instance [ 1628.369545] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073809} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.369897] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1628.370755] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdcca18-224d-4704-bf53-784485e7a417 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.396023] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 0d519bc8-3cc1-429e-b41b-ed0035622562/0d519bc8-3cc1-429e-b41b-ed0035622562.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1628.396487] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1a80db3-779a-46c1-8f7d-ac2e9681dff9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.417731] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1628.417731] env[62820]: value = "task-1695936" [ 1628.417731] env[62820]: _type = "Task" [ 1628.417731] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.426554] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.572706] env[62820]: DEBUG oslo_vmware.api [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163649} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.572973] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.573774] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1628.573984] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1628.574452] env[62820]: INFO nova.compute.manager [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1628.574452] env[62820]: DEBUG oslo.service.loopingcall [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.574601] env[62820]: DEBUG nova.compute.manager [-] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1628.574693] env[62820]: DEBUG nova.network.neutron [-] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1628.598032] env[62820]: DEBUG oslo_vmware.api [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1695935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175146} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.598420] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.598806] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1628.599027] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1628.599561] env[62820]: INFO nova.compute.manager [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1628.599561] env[62820]: DEBUG oslo.service.loopingcall [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.599674] env[62820]: DEBUG nova.compute.manager [-] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1628.599727] env[62820]: DEBUG nova.network.neutron [-] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1628.797884] env[62820]: DEBUG nova.compute.manager [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1628.797884] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1628.797884] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64b92e59-59ec-4007-ac40-d77b6f4745ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.806908] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bb7545-e9d9-4b2b-886d-846efaf9c026 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.857614] env[62820]: WARNING nova.virt.vmwareapi.vmops [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ac8c3b8-e5e5-4a74-a430-a88e856b705e could not be found. [ 1628.858011] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1628.858257] env[62820]: INFO nova.compute.manager [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1628.858639] env[62820]: DEBUG oslo.service.loopingcall [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.858993] env[62820]: DEBUG nova.compute.manager [-] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1628.859160] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1628.928878] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695936, 'name': ReconfigVM_Task, 'duration_secs': 0.300507} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.931747] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 0d519bc8-3cc1-429e-b41b-ed0035622562/0d519bc8-3cc1-429e-b41b-ed0035622562.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1628.933082] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5486d98-6c90-4235-8e85-94d0858be3b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.941038] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1628.941038] env[62820]: value = "task-1695937" [ 1628.941038] env[62820]: _type = "Task" [ 1628.941038] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.955344] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695937, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.007119] env[62820]: DEBUG nova.compute.manager [req-7f834ab8-15aa-4b1c-bc65-a2c6b5e29d4f req-3382ed20-d1ad-46d7-8c53-2cc7282c28a1 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Received event network-vif-deleted-1d5518da-f98f-4610-94c2-bf2a0a4f8499 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1629.007413] env[62820]: INFO nova.compute.manager [req-7f834ab8-15aa-4b1c-bc65-a2c6b5e29d4f req-3382ed20-d1ad-46d7-8c53-2cc7282c28a1 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Neutron deleted interface 1d5518da-f98f-4610-94c2-bf2a0a4f8499; detaching it from the instance and deleting it from the info cache [ 1629.007668] env[62820]: DEBUG nova.network.neutron [req-7f834ab8-15aa-4b1c-bc65-a2c6b5e29d4f req-3382ed20-d1ad-46d7-8c53-2cc7282c28a1 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.104330] env[62820]: DEBUG nova.compute.manager [req-a26cb798-4af9-4d4a-a079-a6fd7ea5335d req-6efa65e3-baf9-46bb-b622-808fd301736f service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Received event network-vif-deleted-41144e4b-bdb5-419e-902a-b56903f292fc {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1629.104611] env[62820]: INFO nova.compute.manager [req-a26cb798-4af9-4d4a-a079-a6fd7ea5335d req-6efa65e3-baf9-46bb-b622-808fd301736f service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Neutron deleted interface 41144e4b-bdb5-419e-902a-b56903f292fc; detaching it from the instance and deleting it from the info cache [ 1629.104934] env[62820]: DEBUG nova.network.neutron [req-a26cb798-4af9-4d4a-a079-a6fd7ea5335d req-6efa65e3-baf9-46bb-b622-808fd301736f service nova] [instance: 
361b7da3-0e8c-4291-aba0-8b6116b8032f] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.193734] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ab46cb-e46b-4884-a52a-3fb202407c48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.206991] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043cb1c6-6a6f-4bbb-9821-22b748840241 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.242794] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf975b23-9cc8-4a3e-8464-e27bae3b5780 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.251382] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a29207-5d8b-473a-85d0-0020a4d6b6d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.265704] env[62820]: DEBUG nova.compute.provider_tree [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.455518] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695937, 'name': Rename_Task, 'duration_secs': 0.15041} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.456300] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1629.456300] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a667d737-fb2d-41de-8cda-a6c084f2bf3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.465549] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1629.465549] env[62820]: value = "task-1695938" [ 1629.465549] env[62820]: _type = "Task" [ 1629.465549] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.474824] env[62820]: DEBUG nova.network.neutron [-] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.484452] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695938, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.485981] env[62820]: INFO nova.compute.manager [-] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Took 0.89 seconds to deallocate network for instance. [ 1629.505342] env[62820]: DEBUG nova.network.neutron [-] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.518822] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10e1b268-08fd-40a6-8fee-cd67d378b217 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.536281] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8becd109-d5f3-4edf-a83b-9e96d5719fb1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.585035] env[62820]: DEBUG nova.compute.manager [req-7f834ab8-15aa-4b1c-bc65-a2c6b5e29d4f req-3382ed20-d1ad-46d7-8c53-2cc7282c28a1 service nova] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Detach interface failed, port_id=1d5518da-f98f-4610-94c2-bf2a0a4f8499, reason: Instance 9a1b9c99-57ef-4c16-97ca-739917c6c3d7 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1629.612942] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9ccef8b-9468-401e-b0c5-c360adb1afa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.627495] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe583e0a-05d6-4e9f-ac7d-0122323da504 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.660503] env[62820]: DEBUG nova.compute.manager [req-a26cb798-4af9-4d4a-a079-a6fd7ea5335d req-6efa65e3-baf9-46bb-b622-808fd301736f service nova] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Detach interface failed, port_id=41144e4b-bdb5-419e-902a-b56903f292fc, reason: Instance 361b7da3-0e8c-4291-aba0-8b6116b8032f could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1629.673725] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.769402] env[62820]: DEBUG nova.scheduler.client.report [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1629.978033] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695938, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.995779] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.010502] env[62820]: INFO nova.compute.manager [-] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Took 1.43 seconds to deallocate network for instance. [ 1630.178769] env[62820]: INFO nova.compute.manager [-] [instance: 4ac8c3b8-e5e5-4a74-a430-a88e856b705e] Took 1.32 seconds to deallocate network for instance. [ 1630.275531] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.276093] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1630.278872] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.211s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.279115] env[62820]: DEBUG nova.objects.instance [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1630.435742] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.436016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.479542] env[62820]: DEBUG oslo_vmware.api [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695938, 'name': PowerOnVM_Task, 'duration_secs': 0.642923} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.479817] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1630.480023] env[62820]: INFO nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Took 7.96 seconds to spawn the instance on the hypervisor. 
[ 1630.480203] env[62820]: DEBUG nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1630.481049] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefe9408-51bb-4bef-935a-b339f994c3c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.515932] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.783678] env[62820]: DEBUG nova.compute.utils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1630.787769] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1630.788028] env[62820]: DEBUG nova.network.neutron [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1630.846200] env[62820]: DEBUG nova.policy [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3459ab4d3d642699fa71b52dc1c5416', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2cd6822ef42b42b1b90e2cb4e7d20a8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1630.938726] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1631.007162] env[62820]: INFO nova.compute.manager [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Took 27.07 seconds to build instance. 
[ 1631.159316] env[62820]: DEBUG nova.network.neutron [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Successfully created port: b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1631.216941] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d616f073-9cb3-4070-880f-2687669ea06c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4ac8c3b8-e5e5-4a74-a430-a88e856b705e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.934s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.291640] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1631.296173] env[62820]: DEBUG oslo_concurrency.lockutils [None req-888194e4-db92-45bd-91df-5222784f18a5 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.297952] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.120s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.299364] env[62820]: INFO nova.compute.claims [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1631.462764] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.509447] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fce1d200-b226-4c3a-a792-34797c6746a6 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.579s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.304171] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 
46434419-d6de-4cc1-905c-14698512b7a5] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1632.312862] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "53ba381a-9f81-4c37-8758-af56fc165dd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.313051] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.327163] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1632.327408] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1632.327621] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1632.327827] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1632.327976] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1632.328139] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f 
tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1632.328343] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1632.328508] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1632.328693] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1632.328859] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1632.329040] env[62820]: DEBUG nova.virt.hardware [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1632.330401] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc81b3b9-38f6-486a-964b-ee3f5bfc47c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.341998] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0330ac-530e-46a9-88a2-7fee88bbecac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.594417] env[62820]: DEBUG nova.compute.manager [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1632.595488] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e362b4-2899-4b80-a239-00ab96f9b27d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.613296] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc78b1c4-cc58-461d-b1f4-0c172d0ec662 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.621692] env[62820]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ba64e0-fecd-483c-958c-9fe9de197b0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.655085] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13764f54-1b38-4d92-8813-13cb93624445 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.663639] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f76a44-d167-4628-9cd9-d604caa684fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.680065] env[62820]: DEBUG nova.compute.provider_tree [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.680585] env[62820]: DEBUG nova.network.neutron [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Successfully updated port: b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1632.818200] env[62820]: DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1633.073814] env[62820]: DEBUG nova.compute.manager [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Received event network-vif-plugged-b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1633.074087] env[62820]: DEBUG oslo_concurrency.lockutils [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] Acquiring lock "46434419-d6de-4cc1-905c-14698512b7a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.074307] env[62820]: DEBUG oslo_concurrency.lockutils [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] Lock "46434419-d6de-4cc1-905c-14698512b7a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.074475] env[62820]: DEBUG oslo_concurrency.lockutils [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] Lock "46434419-d6de-4cc1-905c-14698512b7a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.074646] env[62820]: DEBUG nova.compute.manager [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] No waiting events found dispatching network-vif-plugged-b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1633.074815] env[62820]: WARNING nova.compute.manager [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Received unexpected event network-vif-plugged-b36fcffd-baf9-4baa-a860-018d98ea5451 for instance with vm_state building and task_state spawning. [ 1633.074987] env[62820]: DEBUG nova.compute.manager [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Received event network-changed-b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1633.075171] env[62820]: DEBUG nova.compute.manager [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Refreshing instance network info cache due to event network-changed-b36fcffd-baf9-4baa-a860-018d98ea5451. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1633.075354] env[62820]: DEBUG oslo_concurrency.lockutils [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] Acquiring lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.075493] env[62820]: DEBUG oslo_concurrency.lockutils [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] Acquired lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.075650] env[62820]: DEBUG nova.network.neutron [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Refreshing network info cache for port b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1633.109377] env[62820]: INFO nova.compute.manager [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] instance snapshotting [ 1633.112045] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b8db8a-8a47-4140-9312-8c873eceb51a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.130924] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a3dce3-da24-4a1a-b7f3-de4b2c84d8f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.182838] env[62820]: DEBUG nova.scheduler.client.report [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1633.186381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.343268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.606635] env[62820]: DEBUG nova.network.neutron 
[req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1633.640727] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1633.641057] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-04043ab0-cc03-476f-ba67-4dee2c81ef0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.650642] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1633.650642] env[62820]: value = "task-1695939" [ 1633.650642] env[62820]: _type = "Task" [ 1633.650642] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.658664] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695939, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.683920] env[62820]: DEBUG nova.network.neutron [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.689309] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.693023] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1633.693023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.274s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.694121] env[62820]: INFO nova.compute.claims [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1634.161023] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695939, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.186983] env[62820]: DEBUG oslo_concurrency.lockutils [req-998f02e8-3edb-4f15-b9b9-167e508399de req-c9141979-33e0-4e17-a1c9-f297f302a031 service nova] Releasing lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.187470] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.187738] env[62820]: DEBUG nova.network.neutron [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1634.198261] env[62820]: DEBUG nova.compute.utils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1634.202532] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1634.202716] env[62820]: DEBUG nova.network.neutron [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1634.243454] env[62820]: DEBUG nova.policy [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3459ab4d3d642699fa71b52dc1c5416', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2cd6822ef42b42b1b90e2cb4e7d20a8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1634.661641] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695939, 'name': CreateSnapshot_Task, 'duration_secs': 0.572219} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.661935] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1634.662664] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a19063-fa41-44d9-be73-bb63fb16a9ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.689361] env[62820]: DEBUG nova.network.neutron [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Successfully created port: 40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1634.703560] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1634.731929] env[62820]: DEBUG nova.network.neutron [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1634.887686] env[62820]: DEBUG nova.network.neutron [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Updating instance_info_cache with network_info: [{"id": "b36fcffd-baf9-4baa-a860-018d98ea5451", "address": "fa:16:3e:b4:92:08", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb36fcffd-ba", "ovs_interfaceid": "b36fcffd-baf9-4baa-a860-018d98ea5451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.065707] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585ee5fd-2c6b-45b6-9571-4e8cd892c847 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.073315] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3416e346-55f2-4a14-81a1-b3c637bb58a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.106752] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.107121] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.108954] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3267b9-5487-4612-9fa1-19dc9f4e5584 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.117560] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50da150a-fddc-49e4-99bd-f4f679022c73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.131725] env[62820]: DEBUG nova.compute.provider_tree [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 
tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.180364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1635.180960] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7cad6731-2794-4fa6-a08b-f1b999d3fe62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.190531] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1635.190531] env[62820]: value = "task-1695940" [ 1635.190531] env[62820]: _type = "Task" [ 1635.190531] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.199134] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695940, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.390254] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.390545] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Instance network_info: |[{"id": "b36fcffd-baf9-4baa-a860-018d98ea5451", "address": "fa:16:3e:b4:92:08", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb36fcffd-ba", "ovs_interfaceid": "b36fcffd-baf9-4baa-a860-018d98ea5451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1635.390989] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:92:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b36fcffd-baf9-4baa-a860-018d98ea5451', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1635.398537] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Creating folder: Project (2cd6822ef42b42b1b90e2cb4e7d20a8b). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1635.398834] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6739bad7-9e0b-4175-9014-65993f41ae1d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.412118] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Created folder: Project (2cd6822ef42b42b1b90e2cb4e7d20a8b) in parent group-v353379. [ 1635.412284] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Creating folder: Instances. Parent ref: group-v353603. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1635.412534] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46917232-1022-4813-8e39-0def4f52361c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.423289] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Created folder: Instances in parent group-v353603. [ 1635.423530] env[62820]: DEBUG oslo.service.loopingcall [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.423727] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1635.423935] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fef825d8-cbf1-47aa-a3e0-31c4afd7c701 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.443853] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1635.443853] env[62820]: value = "task-1695943" [ 1635.443853] env[62820]: _type = "Task" [ 1635.443853] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.451630] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695943, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.619023] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.619293] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1635.634799] env[62820]: DEBUG nova.scheduler.client.report [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1635.702339] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695940, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.715762] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1635.743526] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1635.743775] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1635.743932] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1635.744130] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1635.744276] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1635.744423] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1635.744651] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1635.744838] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
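The hardware entries above show nova.virt.hardware taking a 1-vCPU flavor with effectively unbounded limits (sockets=65536, cores=65536, threads=65536) and arriving at a single valid topology, cores=1, sockets=1, threads=1. A minimal sketch of that kind of factorization-based enumeration is shown below; the names are hypothetical stand-ins and this is not the actual nova.virt.hardware code, only an illustration of how a vCPU count and per-dimension limits constrain the possible topologies.

    from collections import namedtuple

    # Hypothetical stand-in for nova's VirtCPUTopology value object.
    CPUTopology = namedtuple("CPUTopology", ["sockets", "cores", "threads"])

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorizations that exactly cover vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(CPUTopology(sockets, cores, threads))
        return found

    # With the values from the log (1 vCPU, huge limits) the only
    # factorization is 1 socket x 1 core x 1 thread.
    print(possible_topologies(1))   # [CPUTopology(sockets=1, cores=1, threads=1)]

With more vCPUs the same loop would yield several candidates (e.g. 2x1x2, 1x4x1, ...), which is why the log reports "Got 1 possible topologies" only for this single-vCPU flavor before sorting the desired ones.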
[ 1635.745024] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1635.745197] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1635.745364] env[62820]: DEBUG nova.virt.hardware [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1635.746324] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264ad9ff-2eb3-4c56-a2f9-8ddedb89feb1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.755148] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e8601-409a-432d-b6cf-d9d6d6d67085 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.956511] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695943, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.127747] env[62820]: DEBUG nova.compute.manager [req-dcb11ba0-0863-4e17-a61f-0ea5c224e27c req-caffee91-b053-4074-9d7d-57b5f4dff2f6 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Received event network-vif-plugged-40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1636.128287] env[62820]: DEBUG oslo_concurrency.lockutils [req-dcb11ba0-0863-4e17-a61f-0ea5c224e27c req-caffee91-b053-4074-9d7d-57b5f4dff2f6 service nova] Acquiring lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.128287] env[62820]: DEBUG oslo_concurrency.lockutils [req-dcb11ba0-0863-4e17-a61f-0ea5c224e27c req-caffee91-b053-4074-9d7d-57b5f4dff2f6 service nova] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.128454] env[62820]: DEBUG oslo_concurrency.lockutils [req-dcb11ba0-0863-4e17-a61f-0ea5c224e27c req-caffee91-b053-4074-9d7d-57b5f4dff2f6 service nova] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.128570] env[62820]: DEBUG nova.compute.manager [req-dcb11ba0-0863-4e17-a61f-0ea5c224e27c req-caffee91-b053-4074-9d7d-57b5f4dff2f6 service nova] [instance: 
67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] No waiting events found dispatching network-vif-plugged-40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1636.128755] env[62820]: WARNING nova.compute.manager [req-dcb11ba0-0863-4e17-a61f-0ea5c224e27c req-caffee91-b053-4074-9d7d-57b5f4dff2f6 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Received unexpected event network-vif-plugged-40484755-60da-4d73-a825-a5d4eedee87b for instance with vm_state building and task_state spawning. [ 1636.140141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.140672] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1636.143230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.823s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.144597] env[62820]: INFO nova.compute.claims [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1636.201110] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695940, 'name': CloneVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.240356] env[62820]: DEBUG nova.network.neutron [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Successfully updated port: 40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1636.455293] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695943, 'name': CreateVM_Task, 'duration_secs': 0.525232} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.455514] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1636.456178] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.456351] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.456689] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1636.456935] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdcd2cbc-0db2-4926-a9fa-6c48208a616f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.461815] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1636.461815] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ad76a9-5676-a0a7-fa2a-56eb6427d6c8" [ 1636.461815] env[62820]: _type = "Task" [ 1636.461815] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.469710] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ad76a9-5676-a0a7-fa2a-56eb6427d6c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.651682] env[62820]: DEBUG nova.compute.utils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.653221] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1636.653385] env[62820]: DEBUG nova.network.neutron [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1636.699369] env[62820]: DEBUG nova.policy [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3459ab4d3d642699fa71b52dc1c5416', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2cd6822ef42b42b1b90e2cb4e7d20a8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1636.708891] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695940, 'name': CloneVM_Task, 'duration_secs': 1.082085} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.709249] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Created linked-clone VM from snapshot [ 1636.710307] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33c39be-b877-4936-8560-ca662606dd61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.719137] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Uploading image 72b0793b-33c0-4ec2-a1d2-d9ae2b8d875f {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1636.731764] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1636.732027] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f750366f-4c3a-4d47-8cde-e0e06d1ae9d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.739851] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1636.739851] env[62820]: value = "task-1695944" [ 1636.739851] env[62820]: _type = "Task" [ 1636.739851] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.748505] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "refresh_cache-67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.748711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "refresh_cache-67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.748866] env[62820]: DEBUG nova.network.neutron [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1636.750015] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695944, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.973303] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ad76a9-5676-a0a7-fa2a-56eb6427d6c8, 'name': SearchDatastore_Task, 'duration_secs': 0.012387} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.973551] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.974573] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1636.974573] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.974573] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.974573] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1636.974803] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0bf09df-4d30-42ab-802c-91d878eb6552 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.984026] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1636.984026] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1636.984700] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33fb07c6-18c4-46fd-9d45-3005a3cc2b49 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.990420] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1636.990420] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526b310a-c06e-a5ea-68fe-e4472b13d683" [ 1636.990420] env[62820]: _type = "Task" [ 1636.990420] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.998419] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526b310a-c06e-a5ea-68fe-e4472b13d683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.016111] env[62820]: DEBUG nova.network.neutron [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Successfully created port: 66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.157010] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1637.252780] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695944, 'name': Destroy_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.283352] env[62820]: DEBUG nova.network.neutron [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1637.422268] env[62820]: DEBUG nova.network.neutron [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Updating instance_info_cache with network_info: [{"id": "40484755-60da-4d73-a825-a5d4eedee87b", "address": "fa:16:3e:ee:f1:87", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40484755-60", "ovs_interfaceid": "40484755-60da-4d73-a825-a5d4eedee87b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.487329] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b9ca9f-5ce5-4a51-80ef-4a30192653bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.497458] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df38518b-6dc0-4df2-b66a-4e87f8e70abb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.504469] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526b310a-c06e-a5ea-68fe-e4472b13d683, 'name': SearchDatastore_Task, 'duration_secs': 0.009736} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.505663] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-272b1832-a7ae-4070-be91-9208340333ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.532331] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af64dca-9a6c-4711-a466-1170aa2e89d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.536042] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1637.536042] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d2a285-2d5d-20ec-2d41-8440841cc9ca" [ 1637.536042] env[62820]: _type = "Task" [ 1637.536042] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.542805] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d378e61c-22b8-4dae-b3a1-c0a84de5143e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.549723] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d2a285-2d5d-20ec-2d41-8440841cc9ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.561775] env[62820]: DEBUG nova.compute.provider_tree [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.754376] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695944, 'name': Destroy_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.925222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "refresh_cache-67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.925622] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Instance network_info: |[{"id": "40484755-60da-4d73-a825-a5d4eedee87b", "address": "fa:16:3e:ee:f1:87", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40484755-60", "ovs_interfaceid": "40484755-60da-4d73-a825-a5d4eedee87b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1637.926253] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:f1:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40484755-60da-4d73-a825-a5d4eedee87b', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1637.934024] env[62820]: DEBUG oslo.service.loopingcall [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1637.934213] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1637.934438] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8caf7f8c-1bc2-4aff-8dde-3a8b7885666c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.955348] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1637.955348] env[62820]: value = "task-1695945" [ 1637.955348] env[62820]: _type = "Task" [ 1637.955348] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.963531] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695945, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.048189] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d2a285-2d5d-20ec-2d41-8440841cc9ca, 'name': SearchDatastore_Task, 'duration_secs': 0.031302} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.048536] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.048955] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 46434419-d6de-4cc1-905c-14698512b7a5/46434419-d6de-4cc1-905c-14698512b7a5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1638.049293] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a51d224c-902a-40b3-972a-bc59a91729ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.057250] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1638.057250] env[62820]: value = "task-1695946" [ 1638.057250] env[62820]: _type = "Task" [ 1638.057250] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.069856] env[62820]: DEBUG nova.scheduler.client.report [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1638.073166] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.157613] env[62820]: DEBUG nova.compute.manager [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Received event network-changed-40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1638.157825] env[62820]: DEBUG nova.compute.manager [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Refreshing instance network info cache due to event network-changed-40484755-60da-4d73-a825-a5d4eedee87b. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1638.158070] env[62820]: DEBUG oslo_concurrency.lockutils [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] Acquiring lock "refresh_cache-67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.158284] env[62820]: DEBUG oslo_concurrency.lockutils [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] Acquired lock "refresh_cache-67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.158499] env[62820]: DEBUG nova.network.neutron [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Refreshing network info cache for port 40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1638.165580] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1638.196070] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.196310] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.196467] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.196648] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.196933] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.196933] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.197155] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.197313] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1638.197476] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.197679] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.197862] env[62820]: DEBUG nova.virt.hardware [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.198991] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323d8f28-04e5-4e80-9daa-1c30edfc751a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.208077] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59aceb8e-72b1-45a2-92f6-8416fbabbd60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.253962] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695944, 'name': Destroy_Task, 'duration_secs': 1.419695} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.254173] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Destroyed the VM [ 1638.254350] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1638.254601] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c1af3bb1-5e1b-4bc2-90fc-8f74cde15ebc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.264280] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1638.264280] env[62820]: value = "task-1695947" [ 1638.264280] env[62820]: _type = "Task" [ 1638.264280] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.277053] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695947, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.466990] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695945, 'name': CreateVM_Task, 'duration_secs': 0.504854} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.467195] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1638.467962] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.468145] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.468465] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1638.468744] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eba560a-5cf7-4569-8ced-6426c67cc797 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.474316] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1638.474316] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5285f4bf-1a53-f56b-7e4f-7133af3bfe7a" [ 1638.474316] env[62820]: _type = "Task" [ 1638.474316] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.482952] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5285f4bf-1a53-f56b-7e4f-7133af3bfe7a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.562760] env[62820]: DEBUG nova.network.neutron [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Successfully updated port: 66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.570858] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.574844] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.575528] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1638.578632] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.142s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.580117] env[62820]: INFO nova.compute.claims [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1638.777523] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695947, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.924211] env[62820]: DEBUG nova.network.neutron [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Updated VIF entry in instance network info cache for port 40484755-60da-4d73-a825-a5d4eedee87b. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1638.924599] env[62820]: DEBUG nova.network.neutron [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Updating instance_info_cache with network_info: [{"id": "40484755-60da-4d73-a825-a5d4eedee87b", "address": "fa:16:3e:ee:f1:87", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40484755-60", "ovs_interfaceid": "40484755-60da-4d73-a825-a5d4eedee87b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.985973] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5285f4bf-1a53-f56b-7e4f-7133af3bfe7a, 'name': SearchDatastore_Task, 'duration_secs': 0.040855} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.986331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.986574] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1638.986805] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.986951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.987144] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1638.987448] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc949f74-e487-406f-a0b8-b7d351c6cdd6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.002349] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1639.002577] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1639.003659] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f263c3e-5a20-4c97-8f50-d938f7ade9cc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.010460] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1639.010460] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526a82de-79dc-52ed-232d-eeab5cd15c29" [ 1639.010460] env[62820]: _type = "Task" [ 1639.010460] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.018904] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526a82de-79dc-52ed-232d-eeab5cd15c29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.069291] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "refresh_cache-5fbb6021-ca7d-4cce-90c9-113b7d833d49" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.069441] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "refresh_cache-5fbb6021-ca7d-4cce-90c9-113b7d833d49" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.069598] env[62820]: DEBUG nova.network.neutron [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.071108] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695946, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.084520] env[62820]: DEBUG nova.compute.utils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1639.087899] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1639.088130] env[62820]: DEBUG nova.network.neutron [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1639.135696] env[62820]: DEBUG nova.policy [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '381a74ba1e8f40a387dfd744cdd25ce4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14788b1c55684c2fbd3c07bff18757f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1639.137368] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1639.137612] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.138170] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.138340] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.138492] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.138717] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.138820] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.138945] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1639.139102] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.277802] env[62820]: DEBUG oslo_vmware.api [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695947, 'name': RemoveSnapshot_Task, 'duration_secs': 0.697413} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.278142] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1639.426072] env[62820]: DEBUG nova.network.neutron [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Successfully created port: 61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1639.428611] env[62820]: DEBUG oslo_concurrency.lockutils [req-88e82da7-0869-4603-b652-071ef55bba35 req-0cfc3460-5340-43f1-900d-fb41a7927eb0 service nova] Releasing lock "refresh_cache-67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.524283] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526a82de-79dc-52ed-232d-eeab5cd15c29, 'name': SearchDatastore_Task, 'duration_secs': 0.05324} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.524796] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fddcaaea-02cc-4799-af97-7625180e8414 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.532709] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1639.532709] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527f812a-6bf7-8c01-ad46-ff3239e6513a" [ 1639.532709] env[62820]: _type = "Task" [ 1639.532709] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.543025] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527f812a-6bf7-8c01-ad46-ff3239e6513a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.570363] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695946, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.224096} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.570633] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 46434419-d6de-4cc1-905c-14698512b7a5/46434419-d6de-4cc1-905c-14698512b7a5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1639.570858] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1639.571430] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c69c02c-dcd4-467b-be10-c55c7e55b4de {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.582125] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1639.582125] env[62820]: value = "task-1695948" [ 1639.582125] env[62820]: _type = "Task" [ 1639.582125] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.589372] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1639.602512] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695948, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.606949] env[62820]: DEBUG nova.network.neutron [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1639.642066] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.783959] env[62820]: WARNING nova.compute.manager [None req-0e1089e5-836b-444d-b279-6c2977869f98 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Image not found during snapshot: nova.exception.ImageNotFound: Image 72b0793b-33c0-4ec2-a1d2-d9ae2b8d875f could not be found. [ 1639.804024] env[62820]: DEBUG nova.network.neutron [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Updating instance_info_cache with network_info: [{"id": "66a18bd3-fb6d-4675-9ccf-44fe90f97e13", "address": "fa:16:3e:b1:2e:07", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a18bd3-fb", "ovs_interfaceid": "66a18bd3-fb6d-4675-9ccf-44fe90f97e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.975560] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed036aff-86e7-4277-a955-004e2fe0e832 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.985372] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36565d3-a3ea-4dea-a2b9-9343461a5a7a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.018405] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089953c9-4c5d-422f-8da7-627b9ba999f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.026426] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9d9866-f076-4350-9f77-03d316ff1de4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.041106] env[62820]: DEBUG nova.compute.provider_tree [None 
req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.051328] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527f812a-6bf7-8c01-ad46-ff3239e6513a, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.052162] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.052416] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff/67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1640.053038] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-141284b1-7aa2-4902-9f70-3303a5a5d58c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.061029] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1640.061029] env[62820]: value = "task-1695949" [ 1640.061029] env[62820]: _type = "Task" [ 1640.061029] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.069545] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.092273] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695948, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072579} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.092545] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1640.093343] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bba455-bdea-4bec-ab00-2b2e9122acab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.118825] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 46434419-d6de-4cc1-905c-14698512b7a5/46434419-d6de-4cc1-905c-14698512b7a5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1640.119339] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c08040f8-63ae-4db8-9996-3d182ca6a91b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.141917] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1640.141917] env[62820]: value = "task-1695950" [ 1640.141917] env[62820]: _type = "Task" [ 1640.141917] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.150952] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695950, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.189191] env[62820]: DEBUG nova.compute.manager [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Received event network-vif-plugged-66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1640.189191] env[62820]: DEBUG oslo_concurrency.lockutils [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] Acquiring lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.189473] env[62820]: DEBUG oslo_concurrency.lockutils [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.189662] env[62820]: DEBUG oslo_concurrency.lockutils [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.189839] env[62820]: DEBUG nova.compute.manager [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] No waiting events found dispatching network-vif-plugged-66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1640.190881] env[62820]: WARNING nova.compute.manager [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Received unexpected event network-vif-plugged-66a18bd3-fb6d-4675-9ccf-44fe90f97e13 for instance with vm_state building and task_state spawning. [ 1640.190881] env[62820]: DEBUG nova.compute.manager [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Received event network-changed-66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1640.190881] env[62820]: DEBUG nova.compute.manager [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Refreshing instance network info cache due to event network-changed-66a18bd3-fb6d-4675-9ccf-44fe90f97e13. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1640.190881] env[62820]: DEBUG oslo_concurrency.lockutils [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] Acquiring lock "refresh_cache-5fbb6021-ca7d-4cce-90c9-113b7d833d49" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.305616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "b6c58867-914e-4e6e-8092-fc8991dc87f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.306871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.306871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "b6c58867-914e-4e6e-8092-fc8991dc87f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.306871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.306871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.308931] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "refresh_cache-5fbb6021-ca7d-4cce-90c9-113b7d833d49" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.309259] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Instance network_info: |[{"id": 
"66a18bd3-fb6d-4675-9ccf-44fe90f97e13", "address": "fa:16:3e:b1:2e:07", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a18bd3-fb", "ovs_interfaceid": "66a18bd3-fb6d-4675-9ccf-44fe90f97e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1640.309788] env[62820]: INFO nova.compute.manager [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Terminating instance [ 1640.311322] env[62820]: DEBUG oslo_concurrency.lockutils [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] Acquired lock "refresh_cache-5fbb6021-ca7d-4cce-90c9-113b7d833d49" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.311549] env[62820]: DEBUG nova.network.neutron [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Refreshing network info cache for port 66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1640.313363] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:2e:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66a18bd3-fb6d-4675-9ccf-44fe90f97e13', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.320882] env[62820]: DEBUG oslo.service.loopingcall [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.323262] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.323685] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1d84b62-9bc0-4e8d-b9e7-12e32e646948 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.350234] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.350234] env[62820]: value = "task-1695951" [ 1640.350234] env[62820]: _type = "Task" [ 1640.350234] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.364124] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695951, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.385542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "0d519bc8-3cc1-429e-b41b-ed0035622562" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.385542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.385542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "0d519bc8-3cc1-429e-b41b-ed0035622562-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.385542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.385542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.387778] env[62820]: INFO nova.compute.manager [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 
0d519bc8-3cc1-429e-b41b-ed0035622562] Terminating instance [ 1640.546717] env[62820]: DEBUG nova.scheduler.client.report [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1640.572217] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695949, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.621533] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1640.649070] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1640.649322] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1640.649474] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1640.649652] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1640.649798] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1640.649943] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1640.650166] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1640.650366] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1640.650498] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1640.650702] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1640.650862] env[62820]: DEBUG nova.virt.hardware [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1640.651785] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69816dca-5bcd-4de4-ba6a-fac6617b7503 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.662470] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82e9959-b95e-4f31-8ae9-032250f73103 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.666539] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695950, 'name': ReconfigVM_Task, 'duration_secs': 0.498356} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.666807] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 46434419-d6de-4cc1-905c-14698512b7a5/46434419-d6de-4cc1-905c-14698512b7a5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1640.667803] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7e6a566-13ef-4bc9-94c7-be978e047597 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.681468] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1640.681468] env[62820]: value = "task-1695952" [ 1640.681468] env[62820]: _type = "Task" [ 1640.681468] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.689471] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695952, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.824968] env[62820]: DEBUG nova.compute.manager [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1640.825344] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1640.826120] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e171cf-4169-4e06-a202-471936f62c9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.834606] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1640.834873] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f07db83b-0e6d-4227-9442-bb0331663248 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.840717] env[62820]: DEBUG oslo_vmware.api [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1640.840717] env[62820]: value = "task-1695953" [ 1640.840717] env[62820]: _type = "Task" [ 1640.840717] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.848704] env[62820]: DEBUG oslo_vmware.api [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695953, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.858512] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695951, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.893826] env[62820]: DEBUG nova.compute.manager [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1640.894069] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1640.894997] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11152a3d-556b-4ac7-a373-57aac13887ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.905592] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1640.905833] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25006f8c-a42e-4a41-8990-95ddafafc3ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.912393] env[62820]: DEBUG oslo_vmware.api [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1640.912393] env[62820]: value = "task-1695954" [ 1640.912393] env[62820]: _type = "Task" [ 1640.912393] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.920222] env[62820]: DEBUG oslo_vmware.api [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.949841] env[62820]: DEBUG nova.network.neutron [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Successfully updated port: 61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1641.052519] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.053061] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1641.056348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.978s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.056653] env[62820]: DEBUG nova.objects.instance [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lazy-loading 'resources' on Instance uuid 492db939-78f4-4642-89dd-a01fa94f41b5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1641.071677] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512961} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.071925] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff/67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1641.072154] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1641.072405] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0a572ab-a601-44b5-81b6-dd9866f7b35b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.083229] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1641.083229] env[62820]: value = "task-1695955" [ 1641.083229] env[62820]: _type = "Task" [ 1641.083229] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.084807] env[62820]: DEBUG nova.network.neutron [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Updated VIF entry in instance network info cache for port 66a18bd3-fb6d-4675-9ccf-44fe90f97e13. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1641.085179] env[62820]: DEBUG nova.network.neutron [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Updating instance_info_cache with network_info: [{"id": "66a18bd3-fb6d-4675-9ccf-44fe90f97e13", "address": "fa:16:3e:b1:2e:07", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a18bd3-fb", "ovs_interfaceid": "66a18bd3-fb6d-4675-9ccf-44fe90f97e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.092226] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695955, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.191780] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695952, 'name': Rename_Task, 'duration_secs': 0.161608} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.191987] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1641.192244] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce6da160-229b-4d7b-90d6-d96cfa39ff62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.199057] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1641.199057] env[62820]: value = "task-1695956" [ 1641.199057] env[62820]: _type = "Task" [ 1641.199057] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.206342] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.350649] env[62820]: DEBUG oslo_vmware.api [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695953, 'name': PowerOffVM_Task, 'duration_secs': 0.245961} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.351024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1641.351246] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1641.351568] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dd710a5-c25f-40c3-82bc-acbb9e7c3fdf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.360522] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695951, 'name': CreateVM_Task, 'duration_secs': 0.513767} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.360710] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1641.361385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.361545] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.361853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1641.362098] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df1896b1-df1f-4c42-989f-c92bbdba9507 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.366434] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1641.366434] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c1dbce-e409-e8bf-aded-3d9bc742f1a3" [ 1641.366434] env[62820]: _type = "Task" [ 1641.366434] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.375417] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c1dbce-e409-e8bf-aded-3d9bc742f1a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.423849] env[62820]: DEBUG oslo_vmware.api [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695954, 'name': PowerOffVM_Task, 'duration_secs': 0.231409} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.424184] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1641.424360] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1641.424623] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0f22c69-3625-42e5-a1ba-700d7ea35ca8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.432679] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1641.432904] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1641.433141] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Deleting the datastore file [datastore1] b6c58867-914e-4e6e-8092-fc8991dc87f7 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1641.433388] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a52cdfe-435a-4342-9a06-5f8adb5c7892 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.440504] env[62820]: DEBUG oslo_vmware.api [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for the task: (returnval){ [ 1641.440504] env[62820]: value = "task-1695959" [ 1641.440504] env[62820]: _type = "Task" [ 1641.440504] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.448066] env[62820]: DEBUG oslo_vmware.api [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.452907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "refresh_cache-10f4cf46-89d2-4ac4-91d5-6626212f4f8e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.452907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "refresh_cache-10f4cf46-89d2-4ac4-91d5-6626212f4f8e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.453057] env[62820]: DEBUG nova.network.neutron [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1641.500393] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1641.500677] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1641.500902] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleting the datastore file [datastore1] 0d519bc8-3cc1-429e-b41b-ed0035622562 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1641.501190] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-270fd943-9039-479e-9892-5c4327550d9d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.507183] env[62820]: DEBUG oslo_vmware.api [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for the task: (returnval){ [ 1641.507183] env[62820]: value = "task-1695960" [ 1641.507183] env[62820]: _type = "Task" [ 1641.507183] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.514720] env[62820]: DEBUG oslo_vmware.api [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695960, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.560718] env[62820]: DEBUG nova.compute.utils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1641.562071] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1641.562248] env[62820]: DEBUG nova.network.neutron [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1641.590106] env[62820]: DEBUG oslo_concurrency.lockutils [req-828b8a55-1317-47a9-bafb-e55753d60f57 req-5d649309-c920-4e0b-b06f-007a13850cc2 service nova] Releasing lock "refresh_cache-5fbb6021-ca7d-4cce-90c9-113b7d833d49" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.595507] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094146} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.595912] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1641.597070] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e41b0bc-5113-4c05-88da-815dae925ea6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.622542] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff/67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1641.626242] env[62820]: DEBUG nova.policy [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc838df5682041ed97e19ce34d9f14ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a07ed2a19149b3a58ee43a07e13bba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1641.627531] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-507d61ac-bf37-4eba-94f3-fba7be837721 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.647500] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1641.647500] env[62820]: value = "task-1695961" [ 1641.647500] env[62820]: _type = "Task" [ 1641.647500] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.660450] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695961, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.709465] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695956, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.878780] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c1dbce-e409-e8bf-aded-3d9bc742f1a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010278} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.881729] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.881979] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1641.882231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.882375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.882550] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.882970] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-588ff1e1-80fe-41a9-b456-f3a488c8a99b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.891419] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.891620] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1641.894532] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ab07470-e4a0-469c-a3fd-1d4375278250 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.900275] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1641.900275] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52240b9b-b404-181a-4017-fe478ebc0417" [ 1641.900275] env[62820]: _type = "Task" [ 1641.900275] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.907856] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52240b9b-b404-181a-4017-fe478ebc0417, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.941208] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e825d7-863b-4a50-b213-f8db751ac952 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.944522] env[62820]: DEBUG nova.network.neutron [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Successfully created port: 7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1641.956982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7161bca8-5a1f-46ac-8888-e272ce83a364 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.961602] env[62820]: DEBUG oslo_vmware.api [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Task: {'id': task-1695959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208528} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.961832] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1641.962297] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1641.962297] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1641.962426] env[62820]: INFO nova.compute.manager [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1641.962595] env[62820]: DEBUG oslo.service.loopingcall [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.963673] env[62820]: DEBUG nova.compute.manager [-] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1641.963673] env[62820]: DEBUG nova.network.neutron [-] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1641.989497] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632361f5-cae0-489a-9f37-2c043a75d1eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.997074] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b6c0ef-f3bd-4b17-acbc-44ff2d5e98f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.010799] env[62820]: DEBUG nova.compute.provider_tree [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1642.012704] env[62820]: DEBUG nova.network.neutron [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1642.020643] env[62820]: DEBUG nova.scheduler.client.report [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1642.031875] env[62820]: DEBUG oslo_vmware.api [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Task: {'id': task-1695960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154167} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.032157] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1642.032373] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1642.032610] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1642.032824] env[62820]: INFO nova.compute.manager [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1642.033104] env[62820]: DEBUG oslo.service.loopingcall [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1642.033295] env[62820]: DEBUG nova.compute.manager [-] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1642.033387] env[62820]: DEBUG nova.network.neutron [-] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1642.066215] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1642.160841] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.211079] env[62820]: DEBUG oslo_vmware.api [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695956, 'name': PowerOnVM_Task, 'duration_secs': 0.519912} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.211559] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1642.211674] env[62820]: INFO nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Took 9.91 seconds to spawn the instance on the hypervisor. [ 1642.211823] env[62820]: DEBUG nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1642.212604] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9427ba71-734b-4f11-84db-1da86be73743 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.330080] env[62820]: DEBUG nova.compute.manager [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Received event network-vif-plugged-61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1642.330080] env[62820]: DEBUG oslo_concurrency.lockutils [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] Acquiring lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.330080] env[62820]: DEBUG oslo_concurrency.lockutils [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.330080] env[62820]: DEBUG oslo_concurrency.lockutils [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.330080] env[62820]: DEBUG nova.compute.manager [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] No waiting events found dispatching network-vif-plugged-61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1642.330080] env[62820]: WARNING nova.compute.manager [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] 
Received unexpected event network-vif-plugged-61a1e393-a7d4-4958-b01e-9365a16c1794 for instance with vm_state building and task_state spawning. [ 1642.330080] env[62820]: DEBUG nova.compute.manager [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Received event network-changed-61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1642.330080] env[62820]: DEBUG nova.compute.manager [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Refreshing instance network info cache due to event network-changed-61a1e393-a7d4-4958-b01e-9365a16c1794. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1642.330080] env[62820]: DEBUG oslo_concurrency.lockutils [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] Acquiring lock "refresh_cache-10f4cf46-89d2-4ac4-91d5-6626212f4f8e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.412648] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52240b9b-b404-181a-4017-fe478ebc0417, 'name': SearchDatastore_Task, 'duration_secs': 0.008769} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.414544] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a31724b9-a6e2-4232-a9b1-dbb0c779501b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.423023] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1642.423023] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ddc86a-bcec-f23f-0a7b-085bcaaea9c3" [ 1642.423023] env[62820]: _type = "Task" [ 1642.423023] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.432363] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ddc86a-bcec-f23f-0a7b-085bcaaea9c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.453527] env[62820]: DEBUG nova.network.neutron [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Updating instance_info_cache with network_info: [{"id": "61a1e393-a7d4-4958-b01e-9365a16c1794", "address": "fa:16:3e:d9:ad:d7", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a1e393-a7", "ovs_interfaceid": "61a1e393-a7d4-4958-b01e-9365a16c1794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.526915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.470s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.533197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.158s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.533197] env[62820]: DEBUG nova.objects.instance [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'resources' on Instance uuid ba5b0055-b756-4f80-ba6b-7e8b705d2970 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1642.548839] env[62820]: INFO nova.scheduler.client.report [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Deleted allocations for instance 492db939-78f4-4642-89dd-a01fa94f41b5 [ 1642.659808] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695961, 'name': ReconfigVM_Task, 'duration_secs': 0.913687} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.660670] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff/67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1642.661294] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c99faa14-1438-4e35-93a5-13d3e37d6111 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.668909] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1642.668909] env[62820]: value = "task-1695962" [ 1642.668909] env[62820]: _type = "Task" [ 1642.668909] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.682334] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695962, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.733029] env[62820]: INFO nova.compute.manager [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Took 29.83 seconds to build instance. [ 1642.932297] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ddc86a-bcec-f23f-0a7b-085bcaaea9c3, 'name': SearchDatastore_Task, 'duration_secs': 0.0095} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.932827] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.932827] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 5fbb6021-ca7d-4cce-90c9-113b7d833d49/5fbb6021-ca7d-4cce-90c9-113b7d833d49.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1642.933107] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51190c76-80cf-4f97-8b05-1a40d7236977 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.939416] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1642.939416] env[62820]: value = "task-1695963" [ 1642.939416] env[62820]: _type = "Task" [ 1642.939416] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.947545] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.956687] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "refresh_cache-10f4cf46-89d2-4ac4-91d5-6626212f4f8e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.957045] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Instance network_info: |[{"id": "61a1e393-a7d4-4958-b01e-9365a16c1794", "address": "fa:16:3e:d9:ad:d7", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a1e393-a7", "ovs_interfaceid": "61a1e393-a7d4-4958-b01e-9365a16c1794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1642.957353] env[62820]: DEBUG oslo_concurrency.lockutils [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] Acquired lock "refresh_cache-10f4cf46-89d2-4ac4-91d5-6626212f4f8e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.957606] env[62820]: DEBUG nova.network.neutron [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Refreshing network info cache for port 61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1642.958829] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:ad:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61a1e393-a7d4-4958-b01e-9365a16c1794', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1642.966627] env[62820]: DEBUG oslo.service.loopingcall [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 
tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1642.967164] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1642.967389] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57701cb5-9d81-40fe-9b0d-12e153347aae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.988876] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1642.988876] env[62820]: value = "task-1695964" [ 1642.988876] env[62820]: _type = "Task" [ 1642.988876] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.996900] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695964, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.059604] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2a745a13-958f-4e42-a31a-d5f240e2c043 tempest-SecurityGroupsTestJSON-675110079 tempest-SecurityGroupsTestJSON-675110079-project-member] Lock "492db939-78f4-4642-89dd-a01fa94f41b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.439s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.075774] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1643.107541] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1643.107880] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1643.108074] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1643.108268] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1643.108417] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1643.108599] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1643.108808] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1643.108965] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1643.109194] env[62820]: DEBUG 
nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1643.109364] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1643.109539] env[62820]: DEBUG nova.virt.hardware [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1643.110794] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed6d00f-e0cc-4fbb-81e1-3aa07588552b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.124018] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5087abc-3f60-4aa7-8350-496d3bfe8b36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.153829] env[62820]: DEBUG nova.network.neutron [-] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.181980] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695962, 'name': Rename_Task, 'duration_secs': 0.270472} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.184983] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1643.185505] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb2de6b7-f9db-44c2-b63c-1c568da6518f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.194681] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1643.194681] env[62820]: value = "task-1695965" [ 1643.194681] env[62820]: _type = "Task" [ 1643.194681] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.209693] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.234386] env[62820]: DEBUG oslo_concurrency.lockutils [None req-961f238a-22b7-43b8-87b7-e8033dabe12f tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.335s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.317474] env[62820]: DEBUG nova.network.neutron [-] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.413778] env[62820]: DEBUG nova.compute.manager [req-c22b78d4-db4a-4afe-bfea-037b2d2c8067 req-2d14c6c1-9196-4b56-bba8-1208091494a2 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Received event network-vif-plugged-7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1643.414053] env[62820]: DEBUG oslo_concurrency.lockutils [req-c22b78d4-db4a-4afe-bfea-037b2d2c8067 req-2d14c6c1-9196-4b56-bba8-1208091494a2 service nova] Acquiring lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.414373] env[62820]: DEBUG oslo_concurrency.lockutils [req-c22b78d4-db4a-4afe-bfea-037b2d2c8067 req-2d14c6c1-9196-4b56-bba8-1208091494a2 service nova] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.414453] env[62820]: DEBUG oslo_concurrency.lockutils [req-c22b78d4-db4a-4afe-bfea-037b2d2c8067 req-2d14c6c1-9196-4b56-bba8-1208091494a2 service nova] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.414644] env[62820]: DEBUG nova.compute.manager [req-c22b78d4-db4a-4afe-bfea-037b2d2c8067 req-2d14c6c1-9196-4b56-bba8-1208091494a2 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] No waiting events found dispatching network-vif-plugged-7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1643.415659] env[62820]: WARNING nova.compute.manager [req-c22b78d4-db4a-4afe-bfea-037b2d2c8067 req-2d14c6c1-9196-4b56-bba8-1208091494a2 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Received unexpected event network-vif-plugged-7cc832bc-1ef4-4db9-9b80-de3bcd73a298 for instance with vm_state building and task_state spawning. 
[ 1643.449408] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc8e169-382f-4edb-8b26-45c25467b112 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.457076] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695963, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.462197] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a0b3f8-3588-41ac-97b4-b640eeed31fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.511916] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1fb78b-e5ed-48ed-9cf1-c9d6bde8c00e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.526680] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695964, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.527717] env[62820]: DEBUG nova.network.neutron [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Successfully updated port: 7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1643.529860] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26afdb45-b1b6-491a-ac86-f37bb2241810 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.548666] env[62820]: DEBUG nova.compute.provider_tree [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1643.657041] env[62820]: INFO nova.compute.manager [-] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Took 1.62 seconds to deallocate network for instance. [ 1643.705143] env[62820]: DEBUG oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695965, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.771176] env[62820]: DEBUG nova.network.neutron [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Updated VIF entry in instance network info cache for port 61a1e393-a7d4-4958-b01e-9365a16c1794. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1643.771560] env[62820]: DEBUG nova.network.neutron [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Updating instance_info_cache with network_info: [{"id": "61a1e393-a7d4-4958-b01e-9365a16c1794", "address": "fa:16:3e:d9:ad:d7", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a1e393-a7", "ovs_interfaceid": "61a1e393-a7d4-4958-b01e-9365a16c1794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.821521] env[62820]: INFO nova.compute.manager [-] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Took 1.86 seconds to deallocate network for instance. [ 1643.954281] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514347} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.954576] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 5fbb6021-ca7d-4cce-90c9-113b7d833d49/5fbb6021-ca7d-4cce-90c9-113b7d833d49.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1643.954801] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1643.955134] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79eb61b7-fb27-49ed-b0e0-1695b2385729 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.962133] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1643.962133] env[62820]: value = "task-1695966" [ 1643.962133] env[62820]: _type = "Task" [ 1643.962133] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.970371] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.022320] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695964, 'name': CreateVM_Task, 'duration_secs': 0.748008} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.022550] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1644.023480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.023718] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.024173] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1644.024656] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0f2cecf-ad2f-47be-ad4a-a54db268d4d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.030929] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1644.030929] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ed5635-26be-560d-5bac-4f080a11e210" [ 1644.030929] env[62820]: _type = "Task" [ 1644.030929] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.036375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.036585] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.036812] env[62820]: DEBUG nova.network.neutron [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1644.045800] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ed5635-26be-560d-5bac-4f080a11e210, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.073812] env[62820]: ERROR nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [req-d0ec2509-b25e-4f5d-be56-bb2630cd3871] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d0ec2509-b25e-4f5d-be56-bb2630cd3871"}]} [ 1644.090865] env[62820]: DEBUG nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1644.106402] env[62820]: DEBUG nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1644.106730] env[62820]: DEBUG nova.compute.provider_tree [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1644.118768] env[62820]: DEBUG nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1644.140406] env[62820]: DEBUG nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1644.169473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.214082] env[62820]: DEBUG 
oslo_vmware.api [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695965, 'name': PowerOnVM_Task, 'duration_secs': 0.562486} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.217707] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1644.217993] env[62820]: INFO nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Took 8.50 seconds to spawn the instance on the hypervisor. [ 1644.218222] env[62820]: DEBUG nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1644.223196] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9a0e4c-aecd-425d-a3ec-463ab0cd9579 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.276110] env[62820]: DEBUG oslo_concurrency.lockutils [req-2b6a2e83-b28c-4441-a5bb-c7ad35ded4c9 req-10a0cd1e-231d-4121-b455-985d7a411334 service nova] Releasing lock "refresh_cache-10f4cf46-89d2-4ac4-91d5-6626212f4f8e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.331643] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.361958] env[62820]: DEBUG nova.compute.manager [req-9a49db6f-879c-4ab9-a5a5-3ac140f11f8c req-fa6c4965-1600-42ce-8649-5dd51bcbed08 service nova] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Received event network-vif-deleted-4cf9f650-b061-4b02-bf65-8379061b1938 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1644.362106] env[62820]: DEBUG nova.compute.manager [req-9a49db6f-879c-4ab9-a5a5-3ac140f11f8c req-fa6c4965-1600-42ce-8649-5dd51bcbed08 service nova] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Received event network-vif-deleted-52edbaed-89b5-4d7c-9398-b22a3e8b22be {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1644.474571] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695966, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.545974] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ed5635-26be-560d-5bac-4f080a11e210, 'name': SearchDatastore_Task, 'duration_secs': 0.438492} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.546871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.547171] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1644.547373] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.547490] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.547759] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.548340] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-337c7ec1-b41d-48f5-b10d-a9e67756940f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.551770] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257e853e-bc4f-4d3d-a31a-cfc3dde6eaa1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.560177] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee670e5-a4de-4d62-bfb5-9a268d51b8ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.603530] env[62820]: DEBUG nova.network.neutron [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b 
tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1644.607278] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8946e37-26b1-4a65-8c0f-7c9b0d8c724e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.610265] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.610439] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1644.611420] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-332a8e4e-1844-430c-b0cf-c85fe46df3c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.620973] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7ef798-db34-4868-a935-656025603f11 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.624787] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1644.624787] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5281972d-ef8d-6250-8bc5-f3218539c5d7" [ 1644.624787] env[62820]: _type = "Task" [ 1644.624787] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.637241] env[62820]: DEBUG nova.compute.provider_tree [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1644.646275] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5281972d-ef8d-6250-8bc5-f3218539c5d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.750032] env[62820]: INFO nova.compute.manager [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Took 31.59 seconds to build instance. [ 1644.846947] env[62820]: DEBUG nova.network.neutron [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Updating instance_info_cache with network_info: [{"id": "7cc832bc-1ef4-4db9-9b80-de3bcd73a298", "address": "fa:16:3e:04:d3:4d", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc832bc-1e", "ovs_interfaceid": "7cc832bc-1ef4-4db9-9b80-de3bcd73a298", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.973128] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.136797] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5281972d-ef8d-6250-8bc5-f3218539c5d7, 'name': SearchDatastore_Task, 'duration_secs': 0.108232} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.137657] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0855a659-875f-460c-b300-5ba410a69b6b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.147473] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1645.147473] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52af66b2-03d8-26a9-4d7c-aa898fbc3177" [ 1645.147473] env[62820]: _type = "Task" [ 1645.147473] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.159769] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52af66b2-03d8-26a9-4d7c-aa898fbc3177, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.186903] env[62820]: DEBUG nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1645.187259] env[62820]: DEBUG nova.compute.provider_tree [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 110 to 111 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1645.188204] env[62820]: DEBUG nova.compute.provider_tree [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1645.251840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3bd99de1-d194-4ee4-a24c-224105e47c85 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.101s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.353217] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.353581] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance network_info: |[{"id": "7cc832bc-1ef4-4db9-9b80-de3bcd73a298", "address": "fa:16:3e:04:d3:4d", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc832bc-1e", "ovs_interfaceid": "7cc832bc-1ef4-4db9-9b80-de3bcd73a298", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1645.354218] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:d3:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cc832bc-1ef4-4db9-9b80-de3bcd73a298', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1645.362985] env[62820]: DEBUG oslo.service.loopingcall [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.363306] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1645.363570] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-513d51b9-d1f8-4aee-bb80-edeb7b3a5a7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.385619] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1645.385619] env[62820]: value = "task-1695967" [ 1645.385619] env[62820]: _type = "Task" [ 1645.385619] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.399374] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695967, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.441526] env[62820]: DEBUG nova.compute.manager [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Received event network-changed-7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1645.441787] env[62820]: DEBUG nova.compute.manager [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Refreshing instance network info cache due to event network-changed-7cc832bc-1ef4-4db9-9b80-de3bcd73a298. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1645.442404] env[62820]: DEBUG oslo_concurrency.lockutils [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] Acquiring lock "refresh_cache-78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.442404] env[62820]: DEBUG oslo_concurrency.lockutils [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] Acquired lock "refresh_cache-78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.442404] env[62820]: DEBUG nova.network.neutron [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Refreshing network info cache for port 7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1645.477521] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.661264] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52af66b2-03d8-26a9-4d7c-aa898fbc3177, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.696442] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.167s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.699936] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.456s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.699936] env[62820]: DEBUG nova.objects.instance [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lazy-loading 'resources' on Instance uuid c0d14c00-2c93-490c-8b17-91d3b5ee5b3d {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1645.721177] env[62820]: INFO nova.scheduler.client.report [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocations for instance ba5b0055-b756-4f80-ba6b-7e8b705d2970 [ 1645.896010] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695967, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.981863] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.159263] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52af66b2-03d8-26a9-4d7c-aa898fbc3177, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.236223] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cd33abfa-71f9-4138-ac6f-af85f4bdf15f tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "ba5b0055-b756-4f80-ba6b-7e8b705d2970" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.334s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.250885] env[62820]: DEBUG nova.network.neutron [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Updated VIF entry in instance network info cache for port 7cc832bc-1ef4-4db9-9b80-de3bcd73a298. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1646.251348] env[62820]: DEBUG nova.network.neutron [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Updating instance_info_cache with network_info: [{"id": "7cc832bc-1ef4-4db9-9b80-de3bcd73a298", "address": "fa:16:3e:04:d3:4d", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc832bc-1e", "ovs_interfaceid": "7cc832bc-1ef4-4db9-9b80-de3bcd73a298", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.396703] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695967, 'name': CreateVM_Task, 'duration_secs': 0.70562} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.399334] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1646.400530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.400713] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.401068] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1646.401339] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48eb1e00-b03f-4b67-82ec-e9dd1d89e7fe {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.406615] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1646.406615] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523d5c1d-afb4-26c9-c83b-bb9af668ad1a" [ 1646.406615] env[62820]: _type = "Task" [ 1646.406615] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.417236] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523d5c1d-afb4-26c9-c83b-bb9af668ad1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.480104] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695966, 'name': ExtendVirtualDisk_Task, 'duration_secs': 2.32264} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.482532] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1646.483588] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c0bc77-1637-4240-974e-0eb129861fbe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.506225] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 5fbb6021-ca7d-4cce-90c9-113b7d833d49/5fbb6021-ca7d-4cce-90c9-113b7d833d49.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1646.508956] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-575f68e3-93ac-46e9-8eb1-5b1261e73b9c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.528580] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1646.528580] env[62820]: value = "task-1695968" [ 1646.528580] env[62820]: _type = "Task" [ 1646.528580] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.538550] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.546208] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abc542b-9351-4786-8155-c2edc61d334a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.553351] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824a4a49-6920-4ff3-b936-23536057a731 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.584659] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524bce52-79ad-4818-af4c-b25bdb52bf16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.592193] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75f99eb-0d25-4e85-bc9f-2c81c8585f7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.605222] env[62820]: DEBUG nova.compute.provider_tree [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.659477] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52af66b2-03d8-26a9-4d7c-aa898fbc3177, 'name': SearchDatastore_Task, 'duration_secs': 1.099866} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.659660] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.659942] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 10f4cf46-89d2-4ac4-91d5-6626212f4f8e/10f4cf46-89d2-4ac4-91d5-6626212f4f8e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1646.660210] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48f3faec-880e-409d-94c6-2713233db441 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.666666] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1646.666666] env[62820]: value = "task-1695969" [ 1646.666666] env[62820]: _type = "Task" [ 1646.666666] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.674819] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.754174] env[62820]: DEBUG oslo_concurrency.lockutils [req-ec51efa9-c7bc-4073-a6e2-a8d310603b1f req-b51a9ab5-dce8-429d-981a-c714ebd2a235 service nova] Releasing lock "refresh_cache-78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.918221] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523d5c1d-afb4-26c9-c83b-bb9af668ad1a, 'name': SearchDatastore_Task, 'duration_secs': 0.009439} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.918504] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.918794] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.919054] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.919210] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.919395] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.919677] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-384018ab-f668-4599-8262-c13ff5611ae6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.932964] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.933167] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.934030] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ec9c771-aa36-45d9-ac22-95d1588d90af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.941099] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1646.941099] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b99bef-97e7-6884-6deb-b4a2e579792e" [ 1646.941099] env[62820]: _type = "Task" [ 1646.941099] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.950079] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b99bef-97e7-6884-6deb-b4a2e579792e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.044916] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695968, 'name': ReconfigVM_Task, 'duration_secs': 0.28809} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.045465] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 5fbb6021-ca7d-4cce-90c9-113b7d833d49/5fbb6021-ca7d-4cce-90c9-113b7d833d49.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.046666] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1869389-0272-4e75-abae-90ffd6771484 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.056771] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1647.056771] env[62820]: value = "task-1695970" [ 1647.056771] env[62820]: _type = "Task" [ 1647.056771] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.070720] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695970, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.108318] env[62820]: DEBUG nova.scheduler.client.report [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1647.178911] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695969, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486385} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.179214] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 10f4cf46-89d2-4ac4-91d5-6626212f4f8e/10f4cf46-89d2-4ac4-91d5-6626212f4f8e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1647.179456] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1647.179704] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fcf2522-9f57-4d2c-8902-599a95ef1450 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.185892] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1647.185892] env[62820]: value = "task-1695971" [ 1647.185892] env[62820]: _type = "Task" [ 1647.185892] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.193779] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695971, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.451897] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b99bef-97e7-6884-6deb-b4a2e579792e, 'name': SearchDatastore_Task, 'duration_secs': 0.059639} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.452639] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eae92b1-9cac-45e6-9fb1-24aa3ba627cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.457998] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1647.457998] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52977e52-3b41-a337-423d-e972b4e70668" [ 1647.457998] env[62820]: _type = "Task" [ 1647.457998] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.465204] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52977e52-3b41-a337-423d-e972b4e70668, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.566827] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695970, 'name': Rename_Task, 'duration_secs': 0.194265} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.567100] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.567337] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c3a73df-b588-4dfe-b01e-23b5ed0b7810 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.574435] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1647.574435] env[62820]: value = "task-1695972" [ 1647.574435] env[62820]: _type = "Task" [ 1647.574435] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.586477] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.614424] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.617075] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.573s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.617185] env[62820]: DEBUG nova.objects.instance [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lazy-loading 'resources' on Instance uuid 4ae63ae5-0306-4540-be88-6e7d909c38a3 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.636430] env[62820]: INFO nova.scheduler.client.report [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Deleted allocations for instance c0d14c00-2c93-490c-8b17-91d3b5ee5b3d [ 1647.696100] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695971, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09091} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.696381] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1647.697153] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7caa191a-d2ba-48bf-ab00-ca3e2fe1e96a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.718657] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 10f4cf46-89d2-4ac4-91d5-6626212f4f8e/10f4cf46-89d2-4ac4-91d5-6626212f4f8e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1647.719142] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4796f7fd-0e54-4695-9d2a-cff5e3cfba64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.738280] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1647.738280] env[62820]: value = "task-1695973" [ 1647.738280] env[62820]: _type = "Task" [ 1647.738280] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.746009] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695973, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.968383] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52977e52-3b41-a337-423d-e972b4e70668, 'name': SearchDatastore_Task, 'duration_secs': 0.009178} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.968599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.968917] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.969182] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-226d10a7-8258-408f-aa03-cef11da6cbfc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.975209] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1647.975209] env[62820]: value = "task-1695974" [ 1647.975209] env[62820]: _type = "Task" [ 1647.975209] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.983115] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.084752] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695972, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.147786] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2f510fa3-c0fe-4486-a538-643461164640 tempest-ServerAddressesTestJSON-2067252962 tempest-ServerAddressesTestJSON-2067252962-project-member] Lock "c0d14c00-2c93-490c-8b17-91d3b5ee5b3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.379s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.248459] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695973, 'name': ReconfigVM_Task, 'duration_secs': 0.439912} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.250720] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 10f4cf46-89d2-4ac4-91d5-6626212f4f8e/10f4cf46-89d2-4ac4-91d5-6626212f4f8e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1648.251647] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3e0eb2a-d327-4a85-8550-4b8a5bf4f2e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.258018] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1648.258018] env[62820]: value = "task-1695975" [ 1648.258018] env[62820]: _type = "Task" [ 1648.258018] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.268575] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695975, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.392448] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1a9ba1-4602-4213-95d8-716f95163e89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.400818] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80367343-065e-4411-920f-121c12f59570 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.431859] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf915d7-dbb4-4549-9733-bde71dd08b0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.439363] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a84e055-b071-4fa6-82a8-aace997e6f5a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.452865] env[62820]: DEBUG nova.compute.provider_tree [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1648.485344] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695974, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.587334] env[62820]: DEBUG oslo_vmware.api [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695972, 'name': PowerOnVM_Task, 'duration_secs': 0.564035} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.587608] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.587829] env[62820]: INFO nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Took 10.42 seconds to spawn the instance on the hypervisor. [ 1648.588029] env[62820]: DEBUG nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1648.588881] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ccc2e-7313-4bc0-a05f-1de510edd62b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.769529] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695975, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.987153] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.855539} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.987323] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1648.987528] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1648.987823] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ba2468f-133e-4072-b5e2-3538c79e1879 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.995137] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1648.995137] env[62820]: value = "task-1695976" [ 1648.995137] env[62820]: _type = "Task" [ 1648.995137] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.004912] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695976, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.005918] env[62820]: DEBUG nova.scheduler.client.report [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1649.006187] env[62820]: DEBUG nova.compute.provider_tree [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 111 to 112 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1649.006371] env[62820]: DEBUG nova.compute.provider_tree [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1649.106194] env[62820]: INFO nova.compute.manager [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Took 35.70 seconds to build instance. [ 1649.268704] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695975, 'name': Rename_Task, 'duration_secs': 0.559473} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.269239] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1649.269602] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4699a2ab-afbc-4b28-b052-383d6008ca06 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.278833] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1649.278833] env[62820]: value = "task-1695977" [ 1649.278833] env[62820]: _type = "Task" [ 1649.278833] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.285118] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.504369] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082762} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.504860] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1649.505893] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ade60e-137e-47e4-be11-ea3aa74e312f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.520605] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.904s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.533324] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1649.534122] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.539s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.534501] env[62820]: DEBUG nova.objects.instance [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lazy-loading 'resources' on Instance uuid 9a1b9c99-57ef-4c16-97ca-739917c6c3d7 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1649.542998] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4188a34-00b1-4a61-8051-0f269fdabb2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.562530] env[62820]: INFO nova.scheduler.client.report [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Deleted allocations for instance 4ae63ae5-0306-4540-be88-6e7d909c38a3 [ 1649.576338] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1649.576338] env[62820]: value = "task-1695978" [ 1649.576338] env[62820]: _type = "Task" [ 1649.576338] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.588038] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695978, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.608798] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4fc50f5-0c0c-4e1d-b8df-0290c710a15d tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.217s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.789271] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695977, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.916066] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb8dcf7-a844-487c-93c5-d4043ef151ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.925651] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d84a0a-99c8-4306-9881-552aca3fdec6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.964043] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fb553c-b039-4443-b1b4-4b2373fd0fd8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.973072] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a80389a-51f0-4a42-9409-dd9051c0aaa4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.988850] env[62820]: DEBUG nova.compute.provider_tree [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1650.073951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b470ad46-9c0e-45d1-8f04-a654e810dfe0 tempest-ServerShowV247Test-1588982753 tempest-ServerShowV247Test-1588982753-project-member] Lock "4ae63ae5-0306-4540-be88-6e7d909c38a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.279s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.086850] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.287989] env[62820]: DEBUG oslo_vmware.api [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695977, 'name': PowerOnVM_Task, 'duration_secs': 0.757587} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.288429] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1650.288638] env[62820]: INFO nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Took 9.67 seconds to spawn the instance on the hypervisor. [ 1650.288812] env[62820]: DEBUG nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1650.291335] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395f37e4-fb3b-4944-b6f1-237153108312 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.523571] env[62820]: DEBUG nova.scheduler.client.report [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1650.523830] env[62820]: DEBUG nova.compute.provider_tree [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 112 to 113 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1650.524019] env[62820]: DEBUG nova.compute.provider_tree [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 
tempest-MultipleCreateTestJSON-1191697251-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1650.589789] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695978, 'name': ReconfigVM_Task, 'duration_secs': 0.634789} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.590130] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1650.590783] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80f83f70-0f43-45f3-b73c-7588a4a50345 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.597513] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1650.597513] env[62820]: value = "task-1695979" [ 1650.597513] env[62820]: _type = "Task" [ 1650.597513] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.611308] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695979, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.816288] env[62820]: INFO nova.compute.manager [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Took 36.52 seconds to build instance. 
[ 1651.028950] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.495s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.032728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.515s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.032728] env[62820]: DEBUG nova.objects.instance [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lazy-loading 'resources' on Instance uuid 361b7da3-0e8c-4291-aba0-8b6116b8032f {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1651.059324] env[62820]: INFO nova.scheduler.client.report [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted allocations for instance 9a1b9c99-57ef-4c16-97ca-739917c6c3d7 [ 1651.109284] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695979, 'name': Rename_Task, 'duration_secs': 0.158587} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.109558] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1651.109796] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eed2cabb-998e-4d68-8cdf-da09bf10c7e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.116592] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1651.116592] env[62820]: value = "task-1695980" [ 1651.116592] env[62820]: _type = "Task" [ 1651.116592] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.125049] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695980, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.215144] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.321894] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c4d024dd-6b22-42ce-83d1-edf2a53c421d tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.033s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.322219] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.107s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.322219] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.322431] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.323062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.324869] env[62820]: INFO nova.compute.manager [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Terminating instance [ 1651.568514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b5ddf44-6ded-4a3f-9aa9-4e697b7479b8 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "9a1b9c99-57ef-4c16-97ca-739917c6c3d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.612s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.629395] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695980, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.658472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "46434419-d6de-4cc1-905c-14698512b7a5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.659242] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.659464] env[62820]: DEBUG nova.compute.manager [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1651.660357] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b2fa21-66e0-4901-ad71-1492e98cee77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.673258] env[62820]: DEBUG nova.compute.manager [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1651.673818] env[62820]: DEBUG nova.objects.instance [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lazy-loading 'flavor' on Instance uuid 46434419-d6de-4cc1-905c-14698512b7a5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1651.839017] env[62820]: DEBUG nova.compute.manager [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1651.839017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1651.839017] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56982a2-5967-4991-97ac-ffaaa5ea9852 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.844877] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1651.845143] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa0317cb-cf94-4c30-b8f3-f108697d2de5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.852845] env[62820]: DEBUG oslo_vmware.api [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1651.852845] env[62820]: value = "task-1695981" [ 1651.852845] env[62820]: _type = "Task" [ 1651.852845] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.866394] env[62820]: DEBUG oslo_vmware.api [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695981, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.872551] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1119c4a-4b54-4c52-b5c9-535044dbdd79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.881167] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76b7b23-42fe-4378-a079-27a47b649ff9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.918381] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6648714-9d45-41f2-ba5f-090199d29e0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.927537] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09bb6aa-223c-4c5c-b93a-e9ef16a05554 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.943988] env[62820]: DEBUG nova.compute.provider_tree [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.131031] env[62820]: DEBUG oslo_vmware.api [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695980, 'name': PowerOnVM_Task, 'duration_secs': 0.906225} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.131663] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1652.132022] env[62820]: INFO nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Took 9.06 seconds to spawn the instance on the hypervisor. 
[ 1652.132266] env[62820]: DEBUG nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1652.133437] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c2c218-a1a1-44d5-9933-1fa7d9003c8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.363213] env[62820]: DEBUG oslo_vmware.api [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695981, 'name': PowerOffVM_Task, 'duration_secs': 0.335487} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.363900] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1652.363900] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1652.364059] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8dda932d-64d7-47a8-9630-67e44b4cc7d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.447058] env[62820]: DEBUG nova.scheduler.client.report [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1652.464798] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1652.465021] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1652.465311] env[62820]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleting the datastore file [datastore1] 10f4cf46-89d2-4ac4-91d5-6626212f4f8e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1652.465499] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-626d5059-3703-4efe-b11d-a15fb9d0092a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.477689] env[62820]: DEBUG oslo_vmware.api [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1652.477689] env[62820]: value = "task-1695983" [ 1652.477689] env[62820]: _type = "Task" [ 1652.477689] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.487449] env[62820]: DEBUG oslo_vmware.api [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695983, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.655127] env[62820]: INFO nova.compute.manager [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Took 37.24 seconds to build instance. [ 1652.688719] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1652.689017] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76a09ad4-8168-48e3-9f2a-b24a309bf46c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.695192] env[62820]: DEBUG oslo_vmware.api [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1652.695192] env[62820]: value = "task-1695984" [ 1652.695192] env[62820]: _type = "Task" [ 1652.695192] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.704867] env[62820]: DEBUG oslo_vmware.api [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695984, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.952868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.957788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.494s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.960272] env[62820]: INFO nova.compute.claims [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1652.983979] env[62820]: INFO nova.scheduler.client.report [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted allocations for instance 361b7da3-0e8c-4291-aba0-8b6116b8032f [ 1652.997795] env[62820]: DEBUG oslo_vmware.api [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1695983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404338} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.997795] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1652.998317] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1652.998553] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1652.998730] env[62820]: INFO nova.compute.manager [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1652.999140] env[62820]: DEBUG oslo.service.loopingcall [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1652.999356] env[62820]: DEBUG nova.compute.manager [-] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1652.999460] env[62820]: DEBUG nova.network.neutron [-] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1653.157289] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9245a3ba-c2b5-4a5f-82ae-122af8b2218b tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.750s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.206414] env[62820]: DEBUG oslo_vmware.api [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695984, 'name': PowerOffVM_Task, 'duration_secs': 0.294583} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.206722] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1653.206930] env[62820]: DEBUG nova.compute.manager [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1653.207958] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e7e083-7e31-4142-aa5f-c56abb151489 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.501853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a908113e-f691-42ba-8b33-6d1998df0163 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "361b7da3-0e8c-4291-aba0-8b6116b8032f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.624s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.723981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74bb8a9e-cf92-4ed3-a13b-b62ddb2b2716 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5" "released" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.065s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.811204] env[62820]: DEBUG nova.compute.manager [req-49948d80-35b5-49ba-9f3e-32b8eb77a4d9 req-25e86807-bdbf-4851-8c78-ac0ad033640b service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Received event network-vif-deleted-61a1e393-a7d4-4958-b01e-9365a16c1794 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1653.811432] env[62820]: INFO nova.compute.manager [req-49948d80-35b5-49ba-9f3e-32b8eb77a4d9 req-25e86807-bdbf-4851-8c78-ac0ad033640b service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Neutron deleted interface 61a1e393-a7d4-4958-b01e-9365a16c1794; detaching it from the instance and deleting it from the info cache [ 1653.811589] env[62820]: DEBUG nova.network.neutron [req-49948d80-35b5-49ba-9f3e-32b8eb77a4d9 req-25e86807-bdbf-4851-8c78-ac0ad033640b service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.191601] env[62820]: DEBUG nova.network.neutron [-] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.286605] env[62820]: DEBUG nova.objects.instance [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lazy-loading 'flavor' on Instance uuid 46434419-d6de-4cc1-905c-14698512b7a5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1654.315052] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4427a0d6-2f0c-4f0f-8935-643bb4b83959 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.327760] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1620ff-8780-4349-9ebe-26744444d5f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.345738] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06844e7-eac0-4d9e-b550-4fcae0bacfeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.353887] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805f261b-1f3c-4542-9f8a-78a0f4bc2a02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.402476] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9f1da9-ff3c-4d57-8f2d-90793d935e39 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.405946] env[62820]: DEBUG nova.compute.manager [req-49948d80-35b5-49ba-9f3e-32b8eb77a4d9 req-25e86807-bdbf-4851-8c78-ac0ad033640b service nova] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Detach interface failed, port_id=61a1e393-a7d4-4958-b01e-9365a16c1794, reason: Instance 10f4cf46-89d2-4ac4-91d5-6626212f4f8e could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1654.413183] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0719d06f-0d31-40ff-a549-05084428eab6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.426869] env[62820]: DEBUG nova.compute.provider_tree [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.694158] env[62820]: INFO nova.compute.manager [-] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Took 1.69 seconds to deallocate network for instance. [ 1654.795117] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.795303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquired lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.795484] env[62820]: DEBUG nova.network.neutron [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1654.795709] env[62820]: DEBUG nova.objects.instance [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lazy-loading 'info_cache' on Instance uuid 46434419-d6de-4cc1-905c-14698512b7a5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1654.930324] env[62820]: DEBUG nova.scheduler.client.report [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1655.056393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.056616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.097253] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.097822] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.203303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.223493] env[62820]: INFO nova.compute.manager [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Rebuilding instance [ 1655.286897] env[62820]: DEBUG nova.compute.manager [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1655.288039] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6841d3-e812-4bc3-9c1f-b9385474fb4d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.299084] env[62820]: DEBUG nova.objects.base [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Object Instance<46434419-d6de-4cc1-905c-14698512b7a5> lazy-loaded attributes: flavor,info_cache {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1655.442926] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.443482] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1655.447833] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.103s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.449103] env[62820]: INFO nova.compute.claims [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1655.559599] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1655.600615] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1655.953419] env[62820]: DEBUG nova.compute.utils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.958799] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1655.958799] env[62820]: DEBUG nova.network.neutron [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1656.022346] env[62820]: DEBUG nova.policy [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b083c37557b646efab5aa420d3bf1037', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1d5e893aa5a4d678547c1fb24e3fc68', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1656.101330] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.122483] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.150912] env[62820]: DEBUG nova.network.neutron [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Updating instance_info_cache with network_info: [{"id": "b36fcffd-baf9-4baa-a860-018d98ea5451", "address": "fa:16:3e:b4:92:08", "network": {"id": "90f765b9-2cc1-496d-a434-a2c8c679a344", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-98181575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2cd6822ef42b42b1b90e2cb4e7d20a8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb36fcffd-ba", "ovs_interfaceid": "b36fcffd-baf9-4baa-a860-018d98ea5451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1656.305807] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.305939] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.311979] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1656.312216] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a3f59f5-6c77-45d0-88f2-65b2edd44fd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.320159] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1656.320159] env[62820]: value = "task-1695985" [ 1656.320159] env[62820]: _type = "Task" [ 1656.320159] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.331502] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.425457] env[62820]: DEBUG nova.network.neutron [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Successfully created port: c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1656.459557] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1656.654299] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Releasing lock "refresh_cache-46434419-d6de-4cc1-905c-14698512b7a5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.808881] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1656.834315] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695985, 'name': PowerOffVM_Task, 'duration_secs': 0.208208} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.834650] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.834931] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1656.835857] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef103f5-f317-4150-a166-d7606ac047e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.843613] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1656.843816] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d848e499-a3e6-41ad-9ee2-eecc17673fc0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.942678] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca7f5cf-bfc0-4628-83b8-28f1e661043d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.951089] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2445ed3d-a4e8-40ea-be3c-086274ee6c94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.956499] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1656.957039] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1656.957039] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1656.957211] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e437fc82-7703-4202-9ee3-8886e9756a52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.991020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352d5a0a-d68c-4165-95ab-7b17c959cbdf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.993591] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1656.993591] env[62820]: value = "task-1695987" [ 1656.993591] env[62820]: _type = "Task" [ 1656.993591] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.999976] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fd5a31-503a-4d76-84ab-a301d5ac9bf2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.006840] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.016302] env[62820]: DEBUG nova.compute.provider_tree [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1657.254951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.255223] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.332922] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.490435] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1657.503975] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.517154] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1657.517401] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1657.517555] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1657.518159] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1657.518159] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1657.518159] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1657.518314] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1657.518467] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1657.518635] env[62820]: DEBUG 
nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1657.518799] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1657.518997] env[62820]: DEBUG nova.virt.hardware [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1657.520275] env[62820]: DEBUG nova.scheduler.client.report [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1657.524025] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b651c5-7fbe-448e-b76b-f3120052cc4c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.532508] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb363316-6f13-4810-a64f-41ae7df0be06 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.659589] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1657.660853] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f506cd51-10eb-4846-9e93-32d59f1e587b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.669098] env[62820]: DEBUG oslo_vmware.api [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1657.669098] env[62820]: value = "task-1695988" [ 1657.669098] env[62820]: _type = "Task" [ 1657.669098] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.680850] env[62820]: DEBUG oslo_vmware.api [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695988, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.757489] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1657.985210] env[62820]: DEBUG nova.compute.manager [req-ae42d3f6-f01e-4814-aa47-2e4e818043d0 req-2421ddf8-65b0-4c3e-81e2-da9db144bf7e service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Received event network-vif-plugged-c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1657.985510] env[62820]: DEBUG oslo_concurrency.lockutils [req-ae42d3f6-f01e-4814-aa47-2e4e818043d0 req-2421ddf8-65b0-4c3e-81e2-da9db144bf7e service nova] Acquiring lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.985510] env[62820]: DEBUG oslo_concurrency.lockutils [req-ae42d3f6-f01e-4814-aa47-2e4e818043d0 req-2421ddf8-65b0-4c3e-81e2-da9db144bf7e service nova] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.985832] env[62820]: DEBUG oslo_concurrency.lockutils [req-ae42d3f6-f01e-4814-aa47-2e4e818043d0 req-2421ddf8-65b0-4c3e-81e2-da9db144bf7e service nova] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.985933] env[62820]: DEBUG nova.compute.manager [req-ae42d3f6-f01e-4814-aa47-2e4e818043d0 req-2421ddf8-65b0-4c3e-81e2-da9db144bf7e service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] No waiting events found dispatching network-vif-plugged-c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1657.986169] env[62820]: WARNING nova.compute.manager [req-ae42d3f6-f01e-4814-aa47-2e4e818043d0 req-2421ddf8-65b0-4c3e-81e2-da9db144bf7e service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Received unexpected event network-vif-plugged-c130871a-4770-4d22-83ec-723c4773ee6c for instance with vm_state building and task_state spawning. [ 1658.007755] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.606199} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.007755] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.007755] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1658.007755] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1658.028467] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.028467] env[62820]: DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1658.030538] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.389s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.030695] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.030861] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1658.031160] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.862s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.031358] env[62820]: DEBUG nova.objects.instance [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lazy-loading 'resources' on Instance uuid 0d519bc8-3cc1-429e-b41b-ed0035622562 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1658.034597] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0695e96-8370-49e2-9b2e-c45f188bfbed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.049845] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922e66ba-ad63-46c2-b423-7f52434026c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.058222] env[62820]: DEBUG nova.network.neutron [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Successfully updated port: c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1658.069840] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84fce9d-6219-47f2-b536-88aa8a015e41 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.076872] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5ab488-c372-4350-b08d-c4c3d5186082 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.106562] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=179269MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1658.106724] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.181494] env[62820]: DEBUG oslo_vmware.api [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695988, 'name': PowerOnVM_Task, 'duration_secs': 0.48321} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.182114] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1658.182752] env[62820]: DEBUG nova.compute.manager [None req-b7e8172b-4b89-473a-9a1a-4aa78f17a672 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1658.183886] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27d50bc-e769-4e65-bc35-906d353b6ab6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.964936] env[62820]: DEBUG nova.compute.utils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1658.968434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "refresh_cache-29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.968568] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquired lock "refresh_cache-29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.968709] env[62820]: DEBUG nova.network.neutron [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1658.973037] env[62820]: DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 
53ba381a-9f81-4c37-8758-af56fc165dd7] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1658.973205] env[62820]: DEBUG nova.network.neutron [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1658.992171] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.044607] env[62820]: DEBUG nova.policy [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81a169ac4144a5bbc0a4e3a077cb4a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65abf73e789b48d3ba24e2660d7c0341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1659.276802] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94df6598-b276-4695-b4b4-414a2d4fa3e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.284520] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb88260-5224-48f2-a4b0-2a181ff51a6a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.313921] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645a2e85-b9a7-40da-841f-bbeb8050e5f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.321275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5396ff-cbb7-4d20-91ac-c599d52c1fc9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.335428] env[62820]: DEBUG nova.compute.provider_tree [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1659.409553] env[62820]: DEBUG nova.network.neutron [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Successfully created port: ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1659.474610] env[62820]: 
DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1659.497828] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1659.497989] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1659.498124] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1659.498314] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1659.498459] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1659.498607] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1659.498848] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1659.499198] env[62820]: DEBUG nova.virt.hardware [None 
req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1659.499643] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1659.499733] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1659.499865] env[62820]: DEBUG nova.virt.hardware [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1659.500718] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76072da6-87d8-4d09-863d-125802c60275 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.508866] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34738b8-d7d9-4780-b2dd-1c2237129385 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.513789] env[62820]: DEBUG nova.network.neutron [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1659.527122] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:d3:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cc832bc-1ef4-4db9-9b80-de3bcd73a298', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1659.534649] env[62820]: DEBUG oslo.service.loopingcall [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.537207] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1659.537512] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8572a0d9-bcfe-4f87-b52b-3e3c79e32d8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.557740] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1659.557740] env[62820]: value = "task-1695989" [ 1659.557740] env[62820]: _type = "Task" [ 1659.557740] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.568732] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695989, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.765132] env[62820]: DEBUG nova.network.neutron [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Updating instance_info_cache with network_info: [{"id": "c130871a-4770-4d22-83ec-723c4773ee6c", "address": "fa:16:3e:15:20:4a", "network": {"id": "300a6ffe-d731-4e5a-800e-e4712518da82", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-455619482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1d5e893aa5a4d678547c1fb24e3fc68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc130871a-47", "ovs_interfaceid": "c130871a-4770-4d22-83ec-723c4773ee6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.839195] env[62820]: DEBUG nova.scheduler.client.report [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1660.051177] env[62820]: DEBUG nova.compute.manager [req-bae7427b-6556-4cbf-b4e2-225615f95321 
req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Received event network-changed-c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1660.051177] env[62820]: DEBUG nova.compute.manager [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Refreshing instance network info cache due to event network-changed-c130871a-4770-4d22-83ec-723c4773ee6c. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1660.051462] env[62820]: DEBUG oslo_concurrency.lockutils [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] Acquiring lock "refresh_cache-29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.068351] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695989, 'name': CreateVM_Task, 'duration_secs': 0.30421} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.068524] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1660.069565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.069733] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.070073] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1660.070337] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c007ca2-1f1d-49f4-ac06-a105c99a57a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.075595] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1660.075595] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5230d0b0-3287-0941-c6c7-a1400cd47ffd" [ 1660.075595] env[62820]: _type = "Task" [ 1660.075595] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.083662] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5230d0b0-3287-0941-c6c7-a1400cd47ffd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.269704] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Releasing lock "refresh_cache-29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.270190] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Instance network_info: |[{"id": "c130871a-4770-4d22-83ec-723c4773ee6c", "address": "fa:16:3e:15:20:4a", "network": {"id": "300a6ffe-d731-4e5a-800e-e4712518da82", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-455619482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1d5e893aa5a4d678547c1fb24e3fc68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc130871a-47", "ovs_interfaceid": "c130871a-4770-4d22-83ec-723c4773ee6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1660.270425] env[62820]: DEBUG oslo_concurrency.lockutils [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] Acquired lock "refresh_cache-29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.270613] env[62820]: DEBUG nova.network.neutron [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Refreshing network info cache for port c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1660.271915] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:20:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'f68ebd2a-3c68-48db-8c32-8a01497fc2e7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c130871a-4770-4d22-83ec-723c4773ee6c', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1660.279265] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Creating folder: Project (e1d5e893aa5a4d678547c1fb24e3fc68). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1660.280268] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51b7edc5-aec7-4a10-a5f0-6e5e018af849 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.291925] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Created folder: Project (e1d5e893aa5a4d678547c1fb24e3fc68) in parent group-v353379. [ 1660.292113] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Creating folder: Instances. Parent ref: group-v353611. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1660.292339] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7414d8e7-ad69-428a-a113-087118e1cb0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.302076] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Created folder: Instances in parent group-v353611. [ 1660.302354] env[62820]: DEBUG oslo.service.loopingcall [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1660.302539] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1660.302930] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3812c0fb-05f3-45e7-8d24-68260f509f23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.321915] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1660.321915] env[62820]: value = "task-1695992" [ 1660.321915] env[62820]: _type = "Task" [ 1660.321915] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.330119] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695992, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.346119] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.315s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.348517] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.017s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.348751] env[62820]: DEBUG nova.objects.instance [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lazy-loading 'resources' on Instance uuid b6c58867-914e-4e6e-8092-fc8991dc87f7 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1660.369742] env[62820]: INFO nova.scheduler.client.report [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Deleted allocations for instance 0d519bc8-3cc1-429e-b41b-ed0035622562 [ 1660.489361] env[62820]: DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1660.513765] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1660.514012] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1660.514172] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1660.514355] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1660.514497] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1660.514639] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1660.514840] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1660.514993] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1660.515176] env[62820]: DEBUG nova.virt.hardware [None 
req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1660.515336] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1660.515506] env[62820]: DEBUG nova.virt.hardware [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1660.516368] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0831c97c-2fdd-4bad-91fd-62c24281995a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.524042] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e872c0e1-2efa-4d9d-8790-769ac54d0a95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.585117] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5230d0b0-3287-0941-c6c7-a1400cd47ffd, 'name': SearchDatastore_Task, 'duration_secs': 0.009649} completed successfully. 
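[annotation] The nova.virt.hardware records above walk from the flavor/image limits (65536 sockets, cores and threads) to a single possible topology for one vCPU. A simplified sketch of that enumeration, assuming only that a valid topology is any (sockets, cores, threads) triple whose product equals the vCPU count and that respects the limits; this is an analogue, not Nova's actual code:

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate topologies whose sockets * cores * threads == vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# One vCPU with the logged limits yields exactly the topology in the log:
print(possible_topologies(1, 65536, 65536, 65536))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]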
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.585411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.585648] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1660.585906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.586069] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.586251] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1660.586495] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4dcde23-c48a-432f-989a-8ef0900f499d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.593811] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.593984] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1660.594653] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-951e2678-7dd7-4f39-a364-43d8d947b832 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.599708] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1660.599708] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5224d0eb-3704-3bf7-62d5-0cd33420ded5" [ 1660.599708] env[62820]: _type = "Task" [ 1660.599708] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.606928] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5224d0eb-3704-3bf7-62d5-0cd33420ded5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.835171] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695992, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.877617] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa139563-20ef-4828-acc3-98edf0f93f65 tempest-ImagesTestJSON-108607288 tempest-ImagesTestJSON-108607288-project-member] Lock "0d519bc8-3cc1-429e-b41b-ed0035622562" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.493s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.914994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.914994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.914994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.914994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 
tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.914994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.917837] env[62820]: INFO nova.compute.manager [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Terminating instance [ 1660.966279] env[62820]: DEBUG nova.network.neutron [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Successfully updated port: ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1661.020158] env[62820]: DEBUG nova.network.neutron [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Updated VIF entry in instance network info cache for port c130871a-4770-4d22-83ec-723c4773ee6c. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1661.020549] env[62820]: DEBUG nova.network.neutron [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Updating instance_info_cache with network_info: [{"id": "c130871a-4770-4d22-83ec-723c4773ee6c", "address": "fa:16:3e:15:20:4a", "network": {"id": "300a6ffe-d731-4e5a-800e-e4712518da82", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-455619482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1d5e893aa5a4d678547c1fb24e3fc68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc130871a-47", "ovs_interfaceid": "c130871a-4770-4d22-83ec-723c4773ee6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.109279] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5224d0eb-3704-3bf7-62d5-0cd33420ded5, 'name': SearchDatastore_Task, 'duration_secs': 0.008803} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.112298] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c89a8e50-ee1b-4717-8c42-f631fdaffcfa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.117390] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1661.117390] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52be9962-e987-da31-0519-9d98580d8351" [ 1661.117390] env[62820]: _type = "Task" [ 1661.117390] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.127010] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52be9962-e987-da31-0519-9d98580d8351, 'name': SearchDatastore_Task} progress is 0%. 
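[annotation] The instance_info_cache entry above is a list of VIF dicts, each carrying the port id, MAC address and per-subnet fixed IPs. A small helper to flatten such an entry, using only keys visible in the cached structure above (illustrative; real network_info entries carry more fields):

def summarize_network_info(network_info):
    """Return (port_id, mac, [fixed ips]) for each cached VIF entry."""
    summary = []
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        summary.append((vif["id"], vif["address"], ips))
    return summary

# For the entry logged above this returns:
# [("c130871a-4770-4d22-83ec-723c4773ee6c", "fa:16:3e:15:20:4a", ["192.168.128.11"])]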
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.227981] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d5ab38-85eb-4eba-a754-2b5c828ae185 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.237023] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac2970f-a64c-49f4-8cc2-83e6266b0f40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.268045] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e0af5e-791a-49b6-98ce-d8fd6bc13405 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.275683] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff56855-d844-472f-bbe3-612bc2b78106 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.289476] env[62820]: DEBUG nova.compute.provider_tree [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1661.332362] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695992, 'name': CreateVM_Task, 'duration_secs': 0.97143} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.332529] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1661.333205] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.333373] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.333692] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1661.333940] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff331c15-feba-474a-9eac-d2a88173fe44 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.338472] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1661.338472] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5203ff8e-cce4-a1f6-d0a0-632efd1227f9" [ 1661.338472] env[62820]: _type = "Task" [ 1661.338472] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.345898] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5203ff8e-cce4-a1f6-d0a0-632efd1227f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.422997] env[62820]: DEBUG nova.compute.manager [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1661.423264] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1661.424178] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdba397-909c-4410-a9f9-e942e83cc335 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.432785] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1661.433058] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0221274-ac25-4e9a-bddd-203e77546058 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.439943] env[62820]: DEBUG oslo_vmware.api [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1661.439943] env[62820]: value = "task-1695993" [ 1661.439943] env[62820]: _type = "Task" [ 1661.439943] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.451135] env[62820]: DEBUG oslo_vmware.api [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695993, 'name': PowerOffVM_Task} progress is 0%. 
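[annotation] Several records in this stretch follow the same wait_for_task pattern: submit a vCenter task, poll its progress, and report duration_secs once it completes. A generic polling loop with the same shape, assuming a poll() callable that returns (state, progress); this is a sketch, not oslo.vmware's looping-call machinery:

import time

def wait_for_task(poll, interval=0.5, timeout=300.0):
    """Poll until the task reports success; return elapsed seconds.

    `poll` returns (state, progress) where state is 'running', 'success'
    or 'error'. Errors and timeouts raise so the caller fails fast.
    """
    start = time.monotonic()
    while True:
        state, progress = poll()
        if state == "success":
            return time.monotonic() - start    # analogous to duration_secs
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        print(f"progress is {progress}%")
        time.sleep(interval)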
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.469103] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-53ba381a-9f81-4c37-8758-af56fc165dd7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.469103] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-53ba381a-9f81-4c37-8758-af56fc165dd7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.469103] env[62820]: DEBUG nova.network.neutron [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1661.523607] env[62820]: DEBUG oslo_concurrency.lockutils [req-bae7427b-6556-4cbf-b4e2-225615f95321 req-63a5d0aa-084f-40fa-88fd-6573a4ba3993 service nova] Releasing lock "refresh_cache-29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.628110] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52be9962-e987-da31-0519-9d98580d8351, 'name': SearchDatastore_Task, 'duration_secs': 0.008891} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.628231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.628449] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1661.628749] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0bfb493-1f92-4297-927a-91795534d277 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.638938] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1661.638938] env[62820]: value = "task-1695994" [ 1661.638938] env[62820]: _type = "Task" [ 1661.638938] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.654383] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.792550] env[62820]: DEBUG nova.scheduler.client.report [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1661.850230] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5203ff8e-cce4-a1f6-d0a0-632efd1227f9, 'name': SearchDatastore_Task, 'duration_secs': 0.030687} completed successfully. 
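[annotation] The inventory payload above reports totals, reservations and allocation ratios per resource class for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Schedulable capacity in Placement is derived from these figures, roughly (total - reserved) * allocation_ratio; the arithmetic for the logged provider, as a quick check:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    # (total - reserved) * allocation_ratio, truncated to whole units
    return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}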
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.850527] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.850753] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1661.850981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.851139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.851315] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1661.851664] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acf70980-ad13-4557-be5e-8bd1f258dcc8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.866656] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1661.866862] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1661.867609] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8001d7a6-ba36-4236-b13d-c4f257ceb2c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.874567] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1661.874567] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c714dc-eb4c-6992-9ecd-a12bf6d1bfb6" [ 1661.874567] env[62820]: _type = "Task" [ 1661.874567] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.884348] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c714dc-eb4c-6992-9ecd-a12bf6d1bfb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.951025] env[62820]: DEBUG oslo_vmware.api [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695993, 'name': PowerOffVM_Task, 'duration_secs': 0.391478} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.951282] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1661.951438] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1661.951730] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88ddda12-73cd-41b6-9cc8-adbaa4df2a90 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.020238] env[62820]: DEBUG nova.network.neutron [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1662.057438] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1662.057438] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1662.057438] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleting the datastore file [datastore1] 5fbb6021-ca7d-4cce-90c9-113b7d833d49 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1662.057438] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b69f733-0bd6-472e-a213-9aa4cf3ea5a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.061324] env[62820]: DEBUG oslo_vmware.api [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1662.061324] env[62820]: value = "task-1695996" [ 1662.061324] env[62820]: _type = "Task" [ 1662.061324] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.069789] env[62820]: DEBUG oslo_vmware.api [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695996, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.117593] env[62820]: DEBUG nova.compute.manager [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Received event network-vif-plugged-ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1662.117960] env[62820]: DEBUG oslo_concurrency.lockutils [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] Acquiring lock "53ba381a-9f81-4c37-8758-af56fc165dd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.118088] env[62820]: DEBUG oslo_concurrency.lockutils [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.118255] env[62820]: DEBUG oslo_concurrency.lockutils [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.118422] env[62820]: DEBUG nova.compute.manager [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] No waiting events found dispatching network-vif-plugged-ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1662.118577] env[62820]: WARNING nova.compute.manager [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Received unexpected event network-vif-plugged-ed80036e-4305-4c40-823e-e6704c6b3aaf for instance with vm_state building and task_state spawning. [ 1662.118732] env[62820]: DEBUG nova.compute.manager [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Received event network-changed-ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1662.118900] env[62820]: DEBUG nova.compute.manager [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Refreshing instance network info cache due to event network-changed-ed80036e-4305-4c40-823e-e6704c6b3aaf. 
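[annotation] The records above show the external-event path: the service takes the per-instance "-events" lock, tries to pop a waiter for network-vif-plugged-ed80036e-..., and logs a WARNING because nothing was waiting (the instance is still building). A minimal register/pop sketch with standard threading primitives, offered as an illustration rather than Nova's InstanceEvents class:

import threading

class InstanceEventRegistry:
    """Waiters register expected event names per instance; notifications pop
    the matching waiter, or report the event as unexpected."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # instance_uuid -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_names):
        with self._lock:
            slot = self._waiters.setdefault(instance_uuid, {})
            for name in event_names:
                slot.setdefault(name, threading.Event())

    def pop_and_signal(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            print(f"No waiting events found dispatching {event_name}")
            return False
        waiter.set()
        return True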
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1662.119269] env[62820]: DEBUG oslo_concurrency.lockutils [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] Acquiring lock "refresh_cache-53ba381a-9f81-4c37-8758-af56fc165dd7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.151906] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695994, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.234065] env[62820]: DEBUG nova.network.neutron [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Updating instance_info_cache with network_info: [{"id": "ed80036e-4305-4c40-823e-e6704c6b3aaf", "address": "fa:16:3e:6b:ba:77", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped80036e-43", "ovs_interfaceid": "ed80036e-4305-4c40-823e-e6704c6b3aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.299024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.949s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.301511] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.098s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.304354] env[62820]: DEBUG nova.objects.instance [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'resources' on Instance uuid 10f4cf46-89d2-4ac4-91d5-6626212f4f8e {{(pid=62820) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.330409] env[62820]: INFO nova.scheduler.client.report [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Deleted allocations for instance b6c58867-914e-4e6e-8092-fc8991dc87f7 [ 1662.386458] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c714dc-eb4c-6992-9ecd-a12bf6d1bfb6, 'name': SearchDatastore_Task, 'duration_secs': 0.053505} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.387249] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-490766f1-59f5-4605-9d48-14011f0f723c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.393010] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1662.393010] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526571c0-c5b3-0f9c-510d-355a01b289f7" [ 1662.393010] env[62820]: _type = "Task" [ 1662.393010] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.402296] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526571c0-c5b3-0f9c-510d-355a01b289f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.571487] env[62820]: DEBUG oslo_vmware.api [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1695996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205323} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.571753] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1662.572017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1662.572295] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1662.572534] env[62820]: INFO nova.compute.manager [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1662.572859] env[62820]: DEBUG oslo.service.loopingcall [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.573121] env[62820]: DEBUG nova.compute.manager [-] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1662.573249] env[62820]: DEBUG nova.network.neutron [-] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1662.649717] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524405} completed successfully. 
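[annotation] Taken together, the records from task-1695993 through task-1695996 trace the vmwareapi destroy path for instance 5fbb6021-ca7d-4cce-90c9-113b7d833d49: power off, unregister, delete the instance directory from the datastore, then deallocate networking. A sketch of that ordering; the vm/datastore/network wrappers here are hypothetical stand-ins, not real Nova or oslo.vmware objects:

def destroy_instance(vm, datastore, network, instance_uuid):
    """Hypothetical wrappers; only the ordering reflects the records above."""
    vm.power_off()                                   # PowerOffVM_Task
    vm.unregister()                                  # UnregisterVM
    datastore.delete_directory(instance_uuid)        # DeleteDatastoreFile_Task
    network.deallocate_for_instance(instance_uuid)   # deallocate_for_instance()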
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.653123] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1662.653123] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1662.653123] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da9ebc5d-1b02-4753-af0d-79dfbe2cc01f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.658061] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1662.658061] env[62820]: value = "task-1695997" [ 1662.658061] env[62820]: _type = "Task" [ 1662.658061] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.669222] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695997, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.737011] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-53ba381a-9f81-4c37-8758-af56fc165dd7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.737392] env[62820]: DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Instance network_info: |[{"id": "ed80036e-4305-4c40-823e-e6704c6b3aaf", "address": "fa:16:3e:6b:ba:77", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped80036e-43", "ovs_interfaceid": "ed80036e-4305-4c40-823e-e6704c6b3aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1662.737705] env[62820]: DEBUG oslo_concurrency.lockutils [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] Acquired lock "refresh_cache-53ba381a-9f81-4c37-8758-af56fc165dd7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.737947] env[62820]: DEBUG nova.network.neutron [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Refreshing network info cache for port ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1662.739467] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:ba:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed80036e-4305-4c40-823e-e6704c6b3aaf', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1662.747715] env[62820]: DEBUG oslo.service.loopingcall [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.748310] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1662.748555] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-043e6c9a-26ea-4e38-9303-44f106ba49b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.771461] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1662.771461] env[62820]: value = "task-1695998" [ 1662.771461] env[62820]: _type = "Task" [ 1662.771461] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.781669] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695998, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.843079] env[62820]: DEBUG oslo_concurrency.lockutils [None req-74ba133c-b60f-4b7f-8f76-c19390cc5c7c tempest-ServersWithSpecificFlavorTestJSON-1536380260 tempest-ServersWithSpecificFlavorTestJSON-1536380260-project-member] Lock "b6c58867-914e-4e6e-8092-fc8991dc87f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.535s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.904103] env[62820]: DEBUG nova.compute.manager [req-23448faf-cabb-4bca-9cd8-19d312102cc9 req-032c6f61-69c9-4687-93ea-0321be82d8da service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Received event network-vif-deleted-66a18bd3-fb6d-4675-9ccf-44fe90f97e13 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1662.904343] env[62820]: INFO nova.compute.manager [req-23448faf-cabb-4bca-9cd8-19d312102cc9 req-032c6f61-69c9-4687-93ea-0321be82d8da service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Neutron deleted interface 66a18bd3-fb6d-4675-9ccf-44fe90f97e13; detaching it from the instance and deleting it from the info cache [ 1662.904525] env[62820]: DEBUG nova.network.neutron [req-23448faf-cabb-4bca-9cd8-19d312102cc9 req-032c6f61-69c9-4687-93ea-0321be82d8da service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.914626] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526571c0-c5b3-0f9c-510d-355a01b289f7, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. 
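[annotation] A few records above, the driver condenses the cached VIF for port ed80036e-... into the flat "Instance VIF info" dict (network_name, mac_address, OpaqueNetwork reference, iface_id, vif_model). A mapping with the same shape, built only from keys visible in the two logged structures; the function itself is illustrative, not the driver's code:

def vif_info_from_cache(vif, vif_model="vmxnet3"):
    """Condense one cached network_info entry into the logged VIF-info shape."""
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

# For the entry logged above this reproduces the VIF info record:
# {'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:ba:77', ...,
#  'iface_id': 'ed80036e-4305-4c40-823e-e6704c6b3aaf', 'vif_model': 'vmxnet3'}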
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.915413] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.915679] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8/29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1662.915959] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05fb8331-9b33-4d7d-bed7-b60f957d8e20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.922877] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1662.922877] env[62820]: value = "task-1695999" [ 1662.922877] env[62820]: _type = "Task" [ 1662.922877] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.931420] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1695999, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.141348] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e937fe78-deef-4af0-b4b1-a15acff54c5a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.150805] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaf1411-44ac-4136-a70f-df860e43cb3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.191590] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35d59a6-122f-45f5-8964-4dc21072440a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.197645] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1695997, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06958} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.198592] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1663.199969] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e94a6f-35d3-4c92-83c6-3ddefaa01ad3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.206020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d6c86a-446f-4511-bf3a-7122577fc496 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.229481] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1663.230383] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9532c2a-8bdc-47b1-98a0-379cba50a287 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.255918] env[62820]: DEBUG nova.compute.provider_tree [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1663.263750] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1663.263750] env[62820]: value = "task-1696000" [ 1663.263750] env[62820]: _type = "Task" [ 1663.263750] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.274183] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696000, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.281831] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695998, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.362464] env[62820]: DEBUG nova.network.neutron [-] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.410705] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a867c1e4-a251-49f4-8e92-42ec010e7980 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.421805] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a92fd36-1084-4647-a98e-386738aea214 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.454264] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1695999, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.477295] env[62820]: DEBUG nova.compute.manager [req-23448faf-cabb-4bca-9cd8-19d312102cc9 req-032c6f61-69c9-4687-93ea-0321be82d8da service nova] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Detach interface failed, port_id=66a18bd3-fb6d-4675-9ccf-44fe90f97e13, reason: Instance 5fbb6021-ca7d-4cce-90c9-113b7d833d49 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1663.608686] env[62820]: DEBUG nova.network.neutron [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Updated VIF entry in instance network info cache for port ed80036e-4305-4c40-823e-e6704c6b3aaf. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1663.609353] env[62820]: DEBUG nova.network.neutron [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Updating instance_info_cache with network_info: [{"id": "ed80036e-4305-4c40-823e-e6704c6b3aaf", "address": "fa:16:3e:6b:ba:77", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped80036e-43", "ovs_interfaceid": "ed80036e-4305-4c40-823e-e6704c6b3aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.773832] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.777693] env[62820]: ERROR nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [req-f23f120e-419a-48ff-80ae-289b850fdba7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f23f120e-419a-48ff-80ae-289b850fdba7"}]} [ 1663.784760] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1695998, 'name': CreateVM_Task, 'duration_secs': 0.798387} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.784847] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1663.785790] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.786108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.786694] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1663.786788] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1da605b6-2346-4fb3-a8e3-64ed583708f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.791594] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1663.791594] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5288df63-670b-a129-2059-3376e37d35a7" [ 1663.791594] env[62820]: _type = "Task" [ 1663.791594] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.795735] env[62820]: DEBUG nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1663.804609] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5288df63-670b-a129-2059-3376e37d35a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.804883] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.805153] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1663.805873] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.805873] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.805873] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.806231] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6680f93-5e74-4d51-87a2-eb7eebf99d48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.813060] env[62820]: DEBUG nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1663.813353] env[62820]: DEBUG nova.compute.provider_tree [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1663.816669] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.816838] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1663.817610] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d85d924-f379-4e99-bc80-f683f867bebd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.824678] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1663.824678] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d6797d-e210-2653-991c-4358878a6e5a" [ 1663.824678] env[62820]: _type = "Task" [ 1663.824678] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.834204] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d6797d-e210-2653-991c-4358878a6e5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.835176] env[62820]: DEBUG nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1663.857934] env[62820]: DEBUG nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1663.865198] env[62820]: INFO nova.compute.manager [-] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Took 1.29 seconds to deallocate network for instance. 
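[annotation] The surrounding entries repeatedly show the same oslo.vmware task pattern: an operation such as CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task or ReconfigVM_Task is invoked, then wait_for_task/_poll_task log "progress is N%" lines until the task is reported "completed successfully" with a duration_secs value. The sketch below is a minimal, hypothetical illustration of that polling pattern only; it is not the actual oslo_vmware/api.py code, and get_task_info and poll_interval are assumed stand-ins introduced for the example.

    # Illustrative sketch of the polling loop behind the
    # "Task: {'id': task-...} progress is N%" DEBUG lines above.
    # Hypothetical helper names; not the oslo.vmware implementation.
    import time

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error/cancelled state."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it finishes and return its result.

        get_task_info is a callable returning a dict such as
        {'state': 'running', 'progress': 25} or
        {'state': 'success', 'result': ...}.
        """
        start = time.monotonic()
        while True:
            info = get_task_info()
            state = info.get('state')
            if state == 'success':
                duration = time.monotonic() - start
                # Corresponds to the "completed successfully" lines with duration_secs.
                print(f"Task completed successfully in {duration:.6f}s")
                return info.get('result')
            if state in ('error', 'cancelled'):
                raise TaskFailed(info.get('error', 'unknown error'))
            # Corresponds to the intermediate "progress is N%." lines.
            print(f"Task progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)

In the real driver the polling is driven by an oslo.service looping call (visible as loopingcall.py in the entries above), and the elapsed time of the finished task is what appears as 'duration_secs' in the completed-task log lines.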
[ 1663.958416] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1695999, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686973} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.958416] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8/29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1663.958655] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1663.958798] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1572d8dc-0b7b-4564-ac7d-e34096a3c415 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.966955] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1663.966955] env[62820]: value = "task-1696001" [ 1663.966955] env[62820]: _type = "Task" [ 1663.966955] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.979807] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696001, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.113941] env[62820]: DEBUG oslo_concurrency.lockutils [req-07af1caf-023d-4357-8eda-9f873b12c4e3 req-ee76569a-f125-4af5-b312-5ea66dba8be8 service nova] Releasing lock "refresh_cache-53ba381a-9f81-4c37-8758-af56fc165dd7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.189986] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0890987b-bc0b-4711-85ca-2c87d37df6e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.197674] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f716ff6e-4a36-4e27-a75d-9ca85916d0b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.231261] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ad7af0-93ca-4256-9ff6-a5b42b2524c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.240292] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b54e656-b0e0-45e9-a48e-fe166a47c166 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.253846] env[62820]: DEBUG nova.compute.provider_tree [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1664.274732] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696000, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.336039] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d6797d-e210-2653-991c-4358878a6e5a, 'name': SearchDatastore_Task, 'duration_secs': 0.026592} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.336675] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d6e256-18bd-466c-9f2b-6c30a97856d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.342313] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1664.342313] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5209c273-2bb2-00e9-6575-c356489d3259" [ 1664.342313] env[62820]: _type = "Task" [ 1664.342313] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.350346] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5209c273-2bb2-00e9-6575-c356489d3259, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.372560] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.481631] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.309171} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.481923] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1664.482719] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68756d98-4f95-458f-847b-925cdfd8c776 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.504879] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8/29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1664.505476] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10b72561-0e6d-4aa1-a289-eb406319285a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.526282] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1664.526282] env[62820]: value = "task-1696002" [ 1664.526282] env[62820]: _type = "Task" [ 1664.526282] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.534610] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696002, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.760027] env[62820]: DEBUG nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1664.782873] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696000, 'name': ReconfigVM_Task, 'duration_secs': 1.065359} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.783875] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea/78d9c7ad-af34-4e84-bd0c-d0bf287be0ea.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1664.784497] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54197351-fb0e-401c-805d-012f6c2d8dc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.792654] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1664.792654] env[62820]: value = "task-1696003" [ 1664.792654] env[62820]: _type = "Task" [ 1664.792654] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.801869] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696003, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.856138] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5209c273-2bb2-00e9-6575-c356489d3259, 'name': SearchDatastore_Task, 'duration_secs': 0.014378} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.856868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.857212] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 53ba381a-9f81-4c37-8758-af56fc165dd7/53ba381a-9f81-4c37-8758-af56fc165dd7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1664.857486] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7110190-0878-412f-bf3b-a811d9327d92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.866414] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1664.866414] env[62820]: value = "task-1696004" [ 1664.866414] env[62820]: _type = "Task" [ 1664.866414] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.876595] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.941169] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquiring lock "b7c9f518-c908-42cc-ba09-59b0f8431f68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.941500] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.941723] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquiring lock "b7c9f518-c908-42cc-ba09-59b0f8431f68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.941933] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.942219] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.948262] env[62820]: INFO nova.compute.manager [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Terminating instance [ 1665.044752] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696002, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.267914] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.968s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.274536] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.170s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.276267] env[62820]: INFO nova.compute.claims [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1665.298688] env[62820]: INFO nova.scheduler.client.report [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted allocations for instance 10f4cf46-89d2-4ac4-91d5-6626212f4f8e [ 1665.311860] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696003, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.378369] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696004, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.464190] env[62820]: DEBUG nova.compute.manager [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1665.464454] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1665.464743] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0ba22a3-550f-4e68-b5ab-45e97932c6ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.473170] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1665.473170] env[62820]: value = "task-1696005" [ 1665.473170] env[62820]: _type = "Task" [ 1665.473170] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.483341] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.545376] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696002, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.804598] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696003, 'name': Rename_Task, 'duration_secs': 0.744814} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.804879] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1665.805147] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbaa6dde-db06-4b92-87a8-efb7c0b2e977 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.814024] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1665.814024] env[62820]: value = "task-1696006" [ 1665.814024] env[62820]: _type = "Task" [ 1665.814024] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.814500] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3f818214-6ae8-4e70-83b3-bc1f899cd940 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "10f4cf46-89d2-4ac4-91d5-6626212f4f8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.492s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.823783] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.879871] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.878953} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.880478] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 53ba381a-9f81-4c37-8758-af56fc165dd7/53ba381a-9f81-4c37-8758-af56fc165dd7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1665.880702] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1665.881023] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2529ec12-c8f7-4dd1-9c36-6a7d7a624787 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.890462] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1665.890462] env[62820]: value = "task-1696007" [ 1665.890462] env[62820]: _type = "Task" [ 1665.890462] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.900819] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696007, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.986171] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696005, 'name': PowerOffVM_Task, 'duration_secs': 0.268878} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.986445] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1665.986638] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1665.986826] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353507', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'name': 'volume-952365ce-b448-4f14-9fc5-f9b7c5180d39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c9f518-c908-42cc-ba09-59b0f8431f68', 'attached_at': '', 'detached_at': '', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'serial': '952365ce-b448-4f14-9fc5-f9b7c5180d39'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1665.987644] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737a30ee-d077-4110-b60a-2061a285dba2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.006271] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f767c2-bd1f-49ac-a8ef-b61c9bf73d46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.014013] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1aa8562-6736-4c6b-877c-fa7fdca85aeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.037316] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3830e2-e757-446c-8870-9d0ec91430c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.042105] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696002, 'name': ReconfigVM_Task, 'duration_secs': 1.276392} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.051934] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8/29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1666.052684] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] The volume has not been displaced from its original location: [datastore1] volume-952365ce-b448-4f14-9fc5-f9b7c5180d39/volume-952365ce-b448-4f14-9fc5-f9b7c5180d39.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1666.057872] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1666.058185] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e776ef1d-a2d4-4364-a916-8d6d42916af5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.059820] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa077945-c804-4fee-9841-fc6ec679ea0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.081451] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1666.081451] env[62820]: value = "task-1696008" [ 1666.081451] env[62820]: _type = "Task" [ 1666.081451] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.082965] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1666.082965] env[62820]: value = "task-1696009" [ 1666.082965] env[62820]: _type = "Task" [ 1666.082965] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.094882] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696008, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.097840] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696009, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.328494] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696006, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.405713] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071201} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.408254] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1666.412098] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca6ddd3-528d-4680-a120-dbcabc821f70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.436038] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 53ba381a-9f81-4c37-8758-af56fc165dd7/53ba381a-9f81-4c37-8758-af56fc165dd7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1666.440831] env[62820]: DEBUG nova.compute.manager [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1666.441212] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0226d287-7992-44b2-a65e-d660cc9d12a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.459757] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4170e6-67eb-4c2e-b183-f9225e976220 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.477819] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1666.477819] env[62820]: value = 
"task-1696010" [ 1666.477819] env[62820]: _type = "Task" [ 1666.477819] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.487181] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696010, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.598789] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696008, 'name': Rename_Task, 'duration_secs': 0.393214} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.602479] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1666.602898] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696009, 'name': ReconfigVM_Task, 'duration_secs': 0.162088} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.605901] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c7b69ee-0e11-402f-b95f-1e916db0de44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.607584] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1666.613619] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-512e9307-87cb-43ed-9676-9c19df77919a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.630890] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1666.630890] env[62820]: value = "task-1696011" [ 1666.630890] env[62820]: _type = "Task" [ 1666.630890] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.635267] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1666.635267] env[62820]: value = "task-1696012" [ 1666.635267] env[62820]: _type = "Task" [ 1666.635267] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.648490] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696011, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.652073] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.721731] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560bc48e-1328-4dde-8912-908c0ba82a3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.731976] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba08b7d-04fd-4df7-a77b-633b3a0ac9a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.766698] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da99e043-6718-49e2-85dc-e97d15414780 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.775289] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d9dc25-8ae3-41d5-9232-a10eddd6f9a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.793628] env[62820]: DEBUG nova.compute.provider_tree [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.833630] env[62820]: DEBUG oslo_vmware.api [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696006, 'name': PowerOnVM_Task, 'duration_secs': 0.866209} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.833949] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1666.834186] env[62820]: DEBUG nova.compute.manager [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1666.834978] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8452faff-e0a9-451c-91b2-41ec9649c66a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.981558] env[62820]: INFO nova.compute.manager [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] instance snapshotting [ 1666.981750] env[62820]: DEBUG nova.objects.instance [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'flavor' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1666.993170] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696010, 'name': ReconfigVM_Task, 'duration_secs': 0.304067} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.993545] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 53ba381a-9f81-4c37-8758-af56fc165dd7/53ba381a-9f81-4c37-8758-af56fc165dd7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1666.994202] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7393a200-2310-480b-ab44-826cdca0e5a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.000499] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1667.000499] env[62820]: value = "task-1696013" [ 1667.000499] env[62820]: _type = "Task" [ 1667.000499] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.010639] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696013, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.150823] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696012, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.154465] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696011, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.300649] env[62820]: DEBUG nova.scheduler.client.report [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1667.352466] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.490605] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dbd66c-c692-4f45-893e-7b410009dc47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.514324] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8e8a6c-ee32-4c23-9c10-259b0ff61ec1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.526437] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696013, 'name': Rename_Task, 'duration_secs': 0.134784} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.526921] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1667.527229] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0dc9276-592e-4b42-b1de-f895f8899c1a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.534329] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1667.534329] env[62820]: value = "task-1696014" [ 1667.534329] env[62820]: _type = "Task" [ 1667.534329] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.543313] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.647252] env[62820]: DEBUG oslo_vmware.api [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696011, 'name': PowerOnVM_Task, 'duration_secs': 0.755469} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.647942] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1667.648139] env[62820]: INFO nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Took 10.16 seconds to spawn the instance on the hypervisor. [ 1667.648283] env[62820]: DEBUG nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1667.649066] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5041f9a9-3278-4021-b284-2ad2a0440cb5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.654436] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696012, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.806820] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.535s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.807476] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1667.810642] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.688s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.812966] env[62820]: INFO nova.compute.claims [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1668.029875] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1668.030378] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fd0a89be-6a31-4fa4-b05d-44cba24b4921 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.044395] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1668.044395] env[62820]: value = "task-1696015" [ 1668.044395] env[62820]: _type = "Task" [ 1668.044395] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.054111] env[62820]: DEBUG oslo_vmware.api [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696014, 'name': PowerOnVM_Task, 'duration_secs': 0.450809} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.055055] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1668.055531] env[62820]: INFO nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Took 7.57 seconds to spawn the instance on the hypervisor. [ 1668.055897] env[62820]: DEBUG nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1668.060639] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef88b3a1-2a34-41b9-9ee1-2961daee358d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.064480] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696015, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.151275] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696012, 'name': ReconfigVM_Task, 'duration_secs': 1.236584} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.151634] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353507', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'name': 'volume-952365ce-b448-4f14-9fc5-f9b7c5180d39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c9f518-c908-42cc-ba09-59b0f8431f68', 'attached_at': '', 'detached_at': '', 'volume_id': '952365ce-b448-4f14-9fc5-f9b7c5180d39', 'serial': '952365ce-b448-4f14-9fc5-f9b7c5180d39'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1668.151954] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1668.152752] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44700520-156e-4fc6-9a03-b24accfdd413 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.159495] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1668.159669] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c0ab661-30c1-4cd8-8a84-3ace20bc3139 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.170134] env[62820]: INFO nova.compute.manager [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Took 36.73 seconds to build instance. [ 1668.318617] env[62820]: DEBUG nova.compute.utils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1668.323014] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1668.324881] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1668.326614] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.328406] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.328406] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.328406] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.328406] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.331727] env[62820]: INFO nova.compute.manager [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Terminating instance [ 1668.338882] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1668.339041] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-382574cf-cfbb-4311-bbee-f65f71753214 
tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1668.339355] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Deleting the datastore file [datastore1] b7c9f518-c908-42cc-ba09-59b0f8431f68 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1668.339527] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8da76b77-18f3-497b-a0f5-c813c5837cc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.347348] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for the task: (returnval){ [ 1668.347348] env[62820]: value = "task-1696017" [ 1668.347348] env[62820]: _type = "Task" [ 1668.347348] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.358925] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.554444] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696015, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.582954] env[62820]: INFO nova.compute.manager [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Took 35.26 seconds to build instance. 
[ 1668.599789] env[62820]: DEBUG nova.policy [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e757d7fd8474b04903db4fb76781717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c944a7dcf084460f9fb13731534ed788', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1668.672563] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8e672238-f9c8-4df3-866b-821ea1e94b38 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.236s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.823730] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1668.836271] env[62820]: DEBUG nova.compute.manager [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1668.836513] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1668.837446] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd4797d-8c2c-44f8-bf79-71949c0f5421 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.848829] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1668.857067] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32f0169e-0b6a-4f3a-ba20-12a800438e34 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.877500] env[62820]: DEBUG oslo_vmware.api [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Task: {'id': task-1696017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.117927} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.877915] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1668.878173] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1668.878454] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1668.878713] env[62820]: INFO nova.compute.manager [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Took 3.41 seconds to destroy the instance on the hypervisor. [ 1668.879049] env[62820]: DEBUG oslo.service.loopingcall [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.880934] env[62820]: DEBUG nova.compute.manager [-] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1668.880934] env[62820]: DEBUG nova.network.neutron [-] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1668.883538] env[62820]: DEBUG oslo_vmware.api [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1668.883538] env[62820]: value = "task-1696018" [ 1668.883538] env[62820]: _type = "Task" [ 1668.883538] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.892503] env[62820]: DEBUG oslo_vmware.api [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696018, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.925811] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "860637a2-8c59-42af-a9f5-4e80c5466274" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.926168] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.953843] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Successfully created port: 364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1669.055949] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696015, 'name': CreateSnapshot_Task, 'duration_secs': 0.944336} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.055949] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1669.056471] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c743f5e2-32d6-451e-bc2e-58aafa5b8ee1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.086995] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9c62f3eb-8b0f-4d5a-8f17-9755c624b2de tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.772s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.208654] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57b2c2a-39fc-4c7b-a936-29b01b46de40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.216675] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5355916b-95c4-4b8a-ac2c-47e26c1ecf8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.248424] env[62820]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3537ccb-54ee-44e1-a405-aed128f8adb0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.256344] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cceee71-3458-4cd9-a8f5-d7d5f7126ded {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.270531] env[62820]: DEBUG nova.compute.provider_tree [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1669.392891] env[62820]: DEBUG oslo_vmware.api [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696018, 'name': PowerOffVM_Task, 'duration_secs': 0.30658} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.393264] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1669.393443] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1669.394670] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9eec8f1-1dfc-4032-bdd2-880c72cf4cf5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.432730] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1669.484966] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1669.485209] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1669.485396] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1669.486197] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0762def-390e-49c7-9d9f-620fa01dcb1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.493747] env[62820]: DEBUG oslo_vmware.api [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1669.493747] env[62820]: value = "task-1696020" [ 1669.493747] env[62820]: _type = "Task" [ 1669.493747] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.502059] env[62820]: DEBUG oslo_vmware.api [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696020, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.578691] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1669.579080] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9931eb0e-992e-42b5-82d4-1c0396b540cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.590023] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1669.590023] env[62820]: value = "task-1696021" [ 1669.590023] env[62820]: _type = "Task" [ 1669.590023] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.598912] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696021, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.773976] env[62820]: DEBUG nova.scheduler.client.report [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1669.835786] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1669.877921] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1669.878314] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1669.878588] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1669.878837] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor pref 0:0:0 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1669.879081] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1669.879457] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1669.883376] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1669.883554] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1669.883733] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1669.883901] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1669.884094] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1669.885427] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe763011-6274-4a10-b375-090ed9352c0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.893541] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec35330c-029c-4693-955b-2831d658409c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.921124] env[62820]: DEBUG oslo_concurrency.lockutils [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "53ba381a-9f81-4c37-8758-af56fc165dd7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.921426] env[62820]: DEBUG 
oslo_concurrency.lockutils [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.921627] env[62820]: DEBUG nova.compute.manager [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1669.922527] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dcfb01-2096-42e8-8643-99294350b650 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.930453] env[62820]: DEBUG nova.compute.manager [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1669.931324] env[62820]: DEBUG nova.objects.instance [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'flavor' on Instance uuid 53ba381a-9f81-4c37-8758-af56fc165dd7 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.973017] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.003590] env[62820]: DEBUG oslo_vmware.api [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.385922} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.003893] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1670.004297] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1670.004396] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1670.004550] env[62820]: INFO nova.compute.manager [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1670.004827] env[62820]: DEBUG oslo.service.loopingcall [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.005215] env[62820]: DEBUG nova.compute.manager [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1670.005215] env[62820]: DEBUG nova.network.neutron [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1670.037255] env[62820]: DEBUG nova.compute.manager [req-32444bcb-2e59-469c-aabd-22c2e83c0b94 req-fba05596-75a1-4679-bb48-fa2871fde20e service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Received event network-vif-deleted-cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1670.037488] env[62820]: INFO nova.compute.manager [req-32444bcb-2e59-469c-aabd-22c2e83c0b94 req-fba05596-75a1-4679-bb48-fa2871fde20e service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Neutron deleted interface cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5; detaching it from the instance and deleting it from the info cache [ 1670.037693] env[62820]: DEBUG nova.network.neutron [req-32444bcb-2e59-469c-aabd-22c2e83c0b94 req-fba05596-75a1-4679-bb48-fa2871fde20e service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.101675] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696021, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.280230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.280871] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1670.283518] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.951s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.285013] env[62820]: INFO nova.compute.claims [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1670.456880] env[62820]: DEBUG nova.network.neutron [-] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.474790] env[62820]: DEBUG nova.compute.manager [req-dec8c34f-643a-47ed-9ee2-7f1bf509b12b req-50e29e5f-6fb3-48a2-859f-43e67b9d498f service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Received event network-vif-plugged-364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1670.474894] env[62820]: DEBUG oslo_concurrency.lockutils [req-dec8c34f-643a-47ed-9ee2-7f1bf509b12b req-50e29e5f-6fb3-48a2-859f-43e67b9d498f service nova] Acquiring lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.475123] env[62820]: DEBUG oslo_concurrency.lockutils [req-dec8c34f-643a-47ed-9ee2-7f1bf509b12b req-50e29e5f-6fb3-48a2-859f-43e67b9d498f service nova] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.475292] env[62820]: DEBUG oslo_concurrency.lockutils [req-dec8c34f-643a-47ed-9ee2-7f1bf509b12b req-50e29e5f-6fb3-48a2-859f-43e67b9d498f service nova] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.475460] env[62820]: DEBUG nova.compute.manager [req-dec8c34f-643a-47ed-9ee2-7f1bf509b12b req-50e29e5f-6fb3-48a2-859f-43e67b9d498f service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] No waiting events found dispatching network-vif-plugged-364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1670.475622] env[62820]: WARNING nova.compute.manager [req-dec8c34f-643a-47ed-9ee2-7f1bf509b12b req-50e29e5f-6fb3-48a2-859f-43e67b9d498f service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Received unexpected event network-vif-plugged-364be5e6-c3f1-45ae-97ca-f068e0cfeab6 for instance with vm_state building and task_state spawning. 
[ 1670.546675] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a8b6dba-1ec6-49e6-95ab-a31b1ee7ecc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.556061] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef434c1-f901-4152-b538-6b0a4298b50b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.592546] env[62820]: DEBUG nova.compute.manager [req-32444bcb-2e59-469c-aabd-22c2e83c0b94 req-fba05596-75a1-4679-bb48-fa2871fde20e service nova] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Detach interface failed, port_id=cd6ae25c-d89a-4a19-8c7a-52ac8ae069b5, reason: Instance b7c9f518-c908-42cc-ba09-59b0f8431f68 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1670.601329] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696021, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.641026] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.641311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.641889] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.642121] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.642303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.647023] env[62820]: INFO nova.compute.manager [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Terminating instance [ 1670.709181] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Successfully updated port: 364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1670.792181] env[62820]: DEBUG nova.compute.utils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1670.792664] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1670.792998] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1670.881390] env[62820]: DEBUG nova.policy [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e757d7fd8474b04903db4fb76781717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c944a7dcf084460f9fb13731534ed788', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1670.943830] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1670.944935] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b07612d-2c27-4352-8078-cd7e3a24daa6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.955770] env[62820]: DEBUG oslo_vmware.api [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1670.955770] env[62820]: value = "task-1696022" [ 1670.955770] env[62820]: _type = "Task" [ 1670.955770] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.961122] env[62820]: INFO nova.compute.manager [-] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Took 2.08 seconds to deallocate network for instance. [ 1670.965109] env[62820]: DEBUG oslo_vmware.api [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.992637] env[62820]: DEBUG nova.network.neutron [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.107076] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696021, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.152242] env[62820]: DEBUG nova.compute.manager [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1671.152242] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.152242] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60145c38-675f-448f-9b3c-0e0a3d9a4965 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.160355] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1671.160633] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f2014b6-29e9-423f-a39b-e30d25c0679f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.169244] env[62820]: DEBUG oslo_vmware.api [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1671.169244] env[62820]: value = "task-1696023" [ 1671.169244] env[62820]: _type = "Task" [ 1671.169244] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.179912] env[62820]: DEBUG oslo_vmware.api [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.212666] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Successfully created port: 51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.216353] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "refresh_cache-0774673f-e7f2-46ce-b9ec-8fadb36ce192" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.216353] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "refresh_cache-0774673f-e7f2-46ce-b9ec-8fadb36ce192" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.216353] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1671.295890] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1671.466040] env[62820]: DEBUG oslo_vmware.api [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696022, 'name': PowerOffVM_Task, 'duration_secs': 0.243725} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.466040] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1671.466040] env[62820]: DEBUG nova.compute.manager [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1671.466436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ea5cf6-3d4f-423c-af7f-9d4bc8a488d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.502483] env[62820]: INFO nova.compute.manager [-] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Took 1.50 seconds to deallocate network for instance. [ 1671.527576] env[62820]: INFO nova.compute.manager [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Took 0.57 seconds to detach 1 volumes for instance. [ 1671.533349] env[62820]: DEBUG nova.compute.manager [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Deleting volume: 952365ce-b448-4f14-9fc5-f9b7c5180d39 {{(pid=62820) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1671.613220] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696021, 'name': CloneVM_Task, 'duration_secs': 1.904139} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.613220] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created linked-clone VM from snapshot [ 1671.613220] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66e3d81-a33f-4d06-915b-7979ce655300 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.622716] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Uploading image b5637a72-690d-4ce3-99c3-dc6f93341a35 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1671.658173] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1671.658173] env[62820]: value = "vm-353616" [ 1671.658173] env[62820]: _type = "VirtualMachine" [ 1671.658173] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1671.658916] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3523e337-eee1-477c-9824-6974b96ea540 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.669169] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease: (returnval){ [ 1671.669169] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52889845-ad8f-0e72-5817-c58edefb6bb3" [ 1671.669169] env[62820]: _type = "HttpNfcLease" [ 1671.669169] env[62820]: } obtained for exporting VM: (result){ [ 1671.669169] env[62820]: value = "vm-353616" [ 1671.669169] env[62820]: _type = "VirtualMachine" [ 1671.669169] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1671.669486] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the lease: (returnval){ [ 1671.669486] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52889845-ad8f-0e72-5817-c58edefb6bb3" [ 1671.669486] env[62820]: _type = "HttpNfcLease" [ 1671.669486] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1671.683161] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1671.683161] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52889845-ad8f-0e72-5817-c58edefb6bb3" [ 1671.683161] env[62820]: _type = "HttpNfcLease" [ 1671.683161] env[62820]: } is ready. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1671.689658] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1671.689658] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52889845-ad8f-0e72-5817-c58edefb6bb3" [ 1671.689658] env[62820]: _type = "HttpNfcLease" [ 1671.689658] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1671.689962] env[62820]: DEBUG oslo_vmware.api [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696023, 'name': PowerOffVM_Task, 'duration_secs': 0.311141} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.693076] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112f6619-e23f-4b11-a6eb-69910255225a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.695476] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1671.695687] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1671.696788] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b609b1b5-510e-403c-8a5c-b22eb16d8acf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.703238] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b3b9c-080d-9e8a-9c3e-e2451f196a75/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1671.703425] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b3b9c-080d-9e8a-9c3e-e2451f196a75/disk-0.vmdk for reading. 
{{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1671.767679] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b285db2-12df-49a1-a698-caf19347347c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.773738] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1671.775605] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1671.775605] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Deleting the datastore file [datastore1] 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1671.775605] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0eef0322-bcca-4048-a91e-251cd09dd107 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.779853] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767df596-aaac-4919-a214-11554a58bc7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.784282] env[62820]: DEBUG oslo_vmware.api [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for the task: (returnval){ [ 1671.784282] env[62820]: value = "task-1696027" [ 1671.784282] env[62820]: _type = "Task" [ 1671.784282] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.821539] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d65d108-c55c-4f97-be0f-0c66a8171b88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.825637] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1671.830636] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c79a9dbe-2d67-4378-acd3-3f375c547881 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.833092] env[62820]: DEBUG oslo_vmware.api [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.839207] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca26353b-693b-466d-a32e-b165b2c159e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.854349] env[62820]: DEBUG nova.compute.provider_tree [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1671.984109] env[62820]: DEBUG oslo_concurrency.lockutils [None req-95d284d7-95b3-45f1-8d92-dc3141850cc0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.063s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.010894] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.105601] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.150498] env[62820]: DEBUG nova.compute.manager [req-6ea34e5a-7328-4a82-843f-8c5cfaec8d1c req-f29dc684-237a-4439-9817-1470b7578524 service nova] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Received event network-vif-deleted-7cc832bc-1ef4-4db9-9b80-de3bcd73a298 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1672.161771] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Updating instance_info_cache with network_info: [{"id": "364be5e6-c3f1-45ae-97ca-f068e0cfeab6", "address": "fa:16:3e:d7:41:91", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364be5e6-c3", "ovs_interfaceid": "364be5e6-c3f1-45ae-97ca-f068e0cfeab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.296558] env[62820]: DEBUG oslo_vmware.api [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Task: {'id': task-1696027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128741} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.296934] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1672.298635] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1672.298635] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1672.298635] env[62820]: INFO nova.compute.manager [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1672.298635] env[62820]: DEBUG oslo.service.loopingcall [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.298635] env[62820]: DEBUG nova.compute.manager [-] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1672.298635] env[62820]: DEBUG nova.network.neutron [-] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1672.323832] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1672.350627] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1672.351088] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1672.351328] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1672.351538] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1672.351868] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1672.352159] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1672.352422] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1672.352773] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1672.352997] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1672.353276] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1672.353523] env[62820]: DEBUG nova.virt.hardware [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1672.354750] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568323ec-3ff1-423f-80f6-eea575f6471a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.360180] env[62820]: DEBUG nova.scheduler.client.report [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1672.370471] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892cc2ed-265d-4a5d-bb3c-b935ca28a321 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.608351] env[62820]: DEBUG nova.compute.manager [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Received event network-changed-364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1672.613551] env[62820]: DEBUG nova.compute.manager 
[req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Refreshing instance network info cache due to event network-changed-364be5e6-c3f1-45ae-97ca-f068e0cfeab6. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1672.614578] env[62820]: DEBUG oslo_concurrency.lockutils [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] Acquiring lock "refresh_cache-0774673f-e7f2-46ce-b9ec-8fadb36ce192" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.645566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "53ba381a-9f81-4c37-8758-af56fc165dd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.645566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.645566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "53ba381a-9f81-4c37-8758-af56fc165dd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.645566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.645566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.647762] env[62820]: INFO nova.compute.manager [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Terminating instance [ 1672.668197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "refresh_cache-0774673f-e7f2-46ce-b9ec-8fadb36ce192" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.668197] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Instance network_info: |[{"id": "364be5e6-c3f1-45ae-97ca-f068e0cfeab6", "address": "fa:16:3e:d7:41:91", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364be5e6-c3", "ovs_interfaceid": "364be5e6-c3f1-45ae-97ca-f068e0cfeab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1672.668197] env[62820]: DEBUG oslo_concurrency.lockutils [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] Acquired lock "refresh_cache-0774673f-e7f2-46ce-b9ec-8fadb36ce192" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.668197] env[62820]: DEBUG nova.network.neutron [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Refreshing network info cache for port 364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1672.668197] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:41:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '364be5e6-c3f1-45ae-97ca-f068e0cfeab6', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1672.683101] env[62820]: DEBUG oslo.service.loopingcall [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.684366] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1672.685385] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63d2f966-8558-4db6-8e93-2f7416920996 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.712349] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1672.712349] env[62820]: value = "task-1696028" [ 1672.712349] env[62820]: _type = "Task" [ 1672.712349] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.724533] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696028, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.866322] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.867729] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1672.876815] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.768s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.974679] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Successfully updated port: 51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1673.153231] env[62820]: DEBUG nova.compute.manager [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1673.153482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1673.154436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8ed158-3c14-4b56-a55e-da0895b14325 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.164329] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1673.164598] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaaa2bd7-caa1-458d-b60a-72fabfbbf617 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.224079] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696028, 'name': CreateVM_Task, 'duration_secs': 0.361566} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.224079] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1673.224079] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.224079] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.224480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1673.224680] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffbe3fa5-5420-40b2-a6f4-2064b858cef2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.230574] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1673.230574] env[62820]: 
value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52607183-d984-4997-5c6b-0cd6f3b143db" [ 1673.230574] env[62820]: _type = "Task" [ 1673.230574] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.241783] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1673.242312] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1673.242691] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleting the datastore file [datastore1] 53ba381a-9f81-4c37-8758-af56fc165dd7 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.243487] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecb84a74-c24a-4afe-8402-936ceabb5d90 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.249466] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52607183-d984-4997-5c6b-0cd6f3b143db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.254895] env[62820]: DEBUG oslo_vmware.api [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1673.254895] env[62820]: value = "task-1696030" [ 1673.254895] env[62820]: _type = "Task" [ 1673.254895] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.266483] env[62820]: DEBUG oslo_vmware.api [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696030, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.284190] env[62820]: DEBUG nova.network.neutron [-] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.387959] env[62820]: DEBUG nova.compute.utils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1673.389971] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1673.389971] env[62820]: DEBUG nova.network.neutron [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1673.461755] env[62820]: DEBUG nova.policy [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1673.478164] env[62820]: DEBUG nova.network.neutron [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Updated VIF entry in instance network info cache for port 364be5e6-c3f1-45ae-97ca-f068e0cfeab6. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.478164] env[62820]: DEBUG nova.network.neutron [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Updating instance_info_cache with network_info: [{"id": "364be5e6-c3f1-45ae-97ca-f068e0cfeab6", "address": "fa:16:3e:d7:41:91", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364be5e6-c3", "ovs_interfaceid": "364be5e6-c3f1-45ae-97ca-f068e0cfeab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.479305] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "refresh_cache-8a105764-ebd9-4c0a-b555-c5fd5ea8684d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.479786] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "refresh_cache-8a105764-ebd9-4c0a-b555-c5fd5ea8684d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.479786] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1673.746543] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52607183-d984-4997-5c6b-0cd6f3b143db, 'name': SearchDatastore_Task, 'duration_secs': 0.021212} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.746543] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.746543] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1673.746543] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.746543] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.746961] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.746999] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1bcb916-5eec-421e-bf68-f94b55065209 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.761047] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.761047] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1673.761047] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9cc1602-3f65-445a-8d21-de72b12e16a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.767804] env[62820]: DEBUG oslo_vmware.api [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136952} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.769352] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.769714] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1673.770166] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1673.770439] env[62820]: INFO nova.compute.manager [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1673.770738] env[62820]: DEBUG oslo.service.loopingcall [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.771151] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1673.771151] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527a9181-3027-7ff2-f850-90bc31d4ade4" [ 1673.771151] env[62820]: _type = "Task" [ 1673.771151] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.771346] env[62820]: DEBUG nova.compute.manager [-] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1673.771443] env[62820]: DEBUG nova.network.neutron [-] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1673.781976] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527a9181-3027-7ff2-f850-90bc31d4ade4, 'name': SearchDatastore_Task, 'duration_secs': 0.008357} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.783066] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7f0e990-2ad0-45ca-be27-fae034930a36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.787637] env[62820]: INFO nova.compute.manager [-] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Took 1.49 seconds to deallocate network for instance. [ 1673.790244] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1673.790244] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b9010f-34d7-c81d-cca9-14ea315f7604" [ 1673.790244] env[62820]: _type = "Task" [ 1673.790244] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.808807] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b9010f-34d7-c81d-cca9-14ea315f7604, 'name': SearchDatastore_Task, 'duration_secs': 0.009854} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.809178] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.809473] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0774673f-e7f2-46ce-b9ec-8fadb36ce192/0774673f-e7f2-46ce-b9ec-8fadb36ce192.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1673.809769] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b336c4d-1d8a-4cf5-a815-36f926a0e172 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.817966] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1673.817966] env[62820]: value = "task-1696031" [ 1673.817966] env[62820]: _type = "Task" [ 1673.817966] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.832034] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696031, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.897590] env[62820]: DEBUG nova.network.neutron [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Successfully created port: ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1673.900279] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1673.927879] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b7c9f518-c908-42cc-ba09-59b0f8431f68 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1673.928141] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.928300] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance eafe98b7-a67d-4bab-bfc0-8367ae069d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0dd0e112-7a7c-4b37-8938-bb98aab2d485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7a755ef6-67bc-4242-9343-c54c8566adf8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8486f52-998d-4308-813a-9c651e2eb093 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 210277a2-dd10-4e08-8627-4b025a554410 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 11843b38-3ce4-42a7-b855-a9d0b473e796 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3228cd34-2144-425a-aca6-400cb0991e43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance e420644c-cfcc-4f8c-ae03-c9ebef585690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 46434419-d6de-4cc1-905c-14698512b7a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 5fbb6021-ca7d-4cce-90c9-113b7d833d49 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1673.932992] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.932992] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 53ba381a-9f81-4c37-8758-af56fc165dd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.935495] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0774673f-e7f2-46ce-b9ec-8fadb36ce192 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.935495] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 8a105764-ebd9-4c0a-b555-c5fd5ea8684d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.935495] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 6da857ea-f213-4b17-9e9f-d74d1ea649c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1673.982022] env[62820]: DEBUG oslo_concurrency.lockutils [req-4028a289-1090-4fa8-a89f-c9f813cf3783 req-beff5535-063b-46bb-91a6-2a8ffbbe29e1 service nova] Releasing lock "refresh_cache-0774673f-e7f2-46ce-b9ec-8fadb36ce192" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.031760] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1674.081037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "76bd4a09-300d-460e-8442-21b4f6567698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.082495] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "76bd4a09-300d-460e-8442-21b4f6567698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.211585] env[62820]: DEBUG nova.compute.manager [req-76fdc9c3-508c-444a-a1d4-0980c8180862 req-0b785e03-74a6-4aff-b0ed-b491db1a7a0c service nova] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Received event network-vif-deleted-c130871a-4770-4d22-83ec-723c4773ee6c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1674.298540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.306099] env[62820]: DEBUG nova.network.neutron [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 
tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Updating instance_info_cache with network_info: [{"id": "51b13f24-958b-455b-b09e-8a78b1c92de2", "address": "fa:16:3e:ae:1a:20", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51b13f24-95", "ovs_interfaceid": "51b13f24-958b-455b-b09e-8a78b1c92de2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.331057] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696031, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.438354] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 03b0abc8-dd32-4cf9-8750-d64b8a66695e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1674.584702] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1674.641306] env[62820]: DEBUG nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Received event network-vif-plugged-51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1674.641557] env[62820]: DEBUG oslo_concurrency.lockutils [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] Acquiring lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.641868] env[62820]: DEBUG oslo_concurrency.lockutils [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.642099] env[62820]: DEBUG oslo_concurrency.lockutils [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.642312] env[62820]: DEBUG nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] No waiting events found dispatching network-vif-plugged-51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1674.642558] env[62820]: WARNING nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Received unexpected event network-vif-plugged-51b13f24-958b-455b-b09e-8a78b1c92de2 for instance with vm_state building and task_state spawning. [ 1674.642769] env[62820]: DEBUG nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Received event network-changed-51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1674.642939] env[62820]: DEBUG nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Refreshing instance network info cache due to event network-changed-51b13f24-958b-455b-b09e-8a78b1c92de2. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1674.643166] env[62820]: DEBUG oslo_concurrency.lockutils [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] Acquiring lock "refresh_cache-8a105764-ebd9-4c0a-b555-c5fd5ea8684d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.778110] env[62820]: DEBUG nova.network.neutron [-] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.807749] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "refresh_cache-8a105764-ebd9-4c0a-b555-c5fd5ea8684d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.808138] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Instance network_info: |[{"id": "51b13f24-958b-455b-b09e-8a78b1c92de2", "address": "fa:16:3e:ae:1a:20", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51b13f24-95", "ovs_interfaceid": "51b13f24-958b-455b-b09e-8a78b1c92de2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1674.808428] env[62820]: DEBUG oslo_concurrency.lockutils [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] Acquired lock "refresh_cache-8a105764-ebd9-4c0a-b555-c5fd5ea8684d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.808603] env[62820]: DEBUG nova.network.neutron [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Refreshing network info cache for port 51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1674.810607] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:1a:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51b13f24-958b-455b-b09e-8a78b1c92de2', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1674.819025] env[62820]: DEBUG oslo.service.loopingcall [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1674.820325] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1674.821220] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef10b22f-ceaa-4564-b90c-36cc0d5cd2b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.851565] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696031, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540822} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.855024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 0774673f-e7f2-46ce-b9ec-8fadb36ce192/0774673f-e7f2-46ce-b9ec-8fadb36ce192.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1674.855024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1674.855024] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1674.855024] env[62820]: value = "task-1696032" [ 1674.855024] env[62820]: _type = "Task" [ 1674.855024] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.855024] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f8258fa-e4cc-4b06-938f-1574ab8003ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.865052] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696032, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.866494] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1674.866494] env[62820]: value = "task-1696033" [ 1674.866494] env[62820]: _type = "Task" [ 1674.866494] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.912967] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1674.941266] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 860637a2-8c59-42af-a9f5-4e80c5466274 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1674.944927] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1674.945212] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1674.945382] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1674.945569] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1674.945726] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1674.945881] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1674.946118] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1674.946297] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1674.946481] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1674.946659] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1674.946838] env[62820]: DEBUG nova.virt.hardware [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1674.947767] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88b20bf-411b-41e7-abfa-3777fc16b178 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.957275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045cc9c0-cffc-4ccb-a998-c41f2702b037 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.111624] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.284871] env[62820]: INFO nova.compute.manager [-] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Took 1.51 seconds to deallocate network for instance. [ 1675.365785] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696032, 'name': CreateVM_Task, 'duration_secs': 0.345935} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.369023] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1675.369023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.369023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.369023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1675.369023] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecbe5322-50ee-4281-be7c-6c84da49000c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.377067] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1675.377067] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525e8e6b-d23f-691f-ea72-e4b81e7214ad" [ 1675.377067] env[62820]: _type = "Task" [ 1675.377067] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.382086] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696033, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082375} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.385222] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1675.386079] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddc9c15-abdd-499f-85f1-87032ed3a9a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.393862] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525e8e6b-d23f-691f-ea72-e4b81e7214ad, 'name': SearchDatastore_Task, 'duration_secs': 0.011409} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.403187] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.403496] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1675.403767] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.403922] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.404120] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1675.413533] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Reconfiguring VM instance 
instance-00000054 to attach disk [datastore1] 0774673f-e7f2-46ce-b9ec-8fadb36ce192/0774673f-e7f2-46ce-b9ec-8fadb36ce192.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.416764] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2d3003a-8994-413a-b45b-8880378fa84c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.419095] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-990d960a-65d0-4576-abd6-35a10d0bc3cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.444103] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1675.444103] env[62820]: value = "task-1696034" [ 1675.444103] env[62820]: _type = "Task" [ 1675.444103] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.445597] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1675.445871] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1675.449495] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d4d5c54-0df8-4b83-bee3-3a4268e74d8d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.452130] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 76bd4a09-300d-460e-8442-21b4f6567698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1675.452390] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1675.452537] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1675.462456] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.463898] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1675.463898] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52797874-1ffb-98d3-d1be-a3883a65d8e8" [ 1675.463898] env[62820]: _type = "Task" [ 1675.463898] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.472450] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52797874-1ffb-98d3-d1be-a3883a65d8e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.613428] env[62820]: DEBUG nova.network.neutron [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Updated VIF entry in instance network info cache for port 51b13f24-958b-455b-b09e-8a78b1c92de2. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1675.613739] env[62820]: DEBUG nova.network.neutron [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Updating instance_info_cache with network_info: [{"id": "51b13f24-958b-455b-b09e-8a78b1c92de2", "address": "fa:16:3e:ae:1a:20", "network": {"id": "03a6bc0f-7e4c-4a58-8fe8-3f25546f012f", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-933330334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c944a7dcf084460f9fb13731534ed788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51b13f24-95", "ovs_interfaceid": "51b13f24-958b-455b-b09e-8a78b1c92de2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.790833] env[62820]: DEBUG nova.network.neutron [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Successfully updated port: ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1675.792915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.839985] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352e22cd-4921-474a-a3f7-f64c09926582 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.848793] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e6e277-c007-403b-997d-a0220ce72654 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.889583] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3dc6e2-60ef-45a4-bf8c-b82172612de5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.895556] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fd6aad-08af-4f31-988f-a1d008779667 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.912303] env[62820]: DEBUG nova.compute.provider_tree [None 
req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.955696] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696034, 'name': ReconfigVM_Task, 'duration_secs': 0.288766} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.956034] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 0774673f-e7f2-46ce-b9ec-8fadb36ce192/0774673f-e7f2-46ce-b9ec-8fadb36ce192.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1675.957020] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c9d148a-60a1-43db-8565-bafe844be551 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.963814] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1675.963814] env[62820]: value = "task-1696035" [ 1675.963814] env[62820]: _type = "Task" [ 1675.963814] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.975736] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696035, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.979714] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52797874-1ffb-98d3-d1be-a3883a65d8e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011077} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.980581] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c42576-b262-4d70-9ecd-85dd851ad056 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.986163] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1675.986163] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e0f830-d390-7290-4c3d-2bb184af8ac1" [ 1675.986163] env[62820]: _type = "Task" [ 1675.986163] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.995167] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e0f830-d390-7290-4c3d-2bb184af8ac1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.116618] env[62820]: DEBUG oslo_concurrency.lockutils [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] Releasing lock "refresh_cache-8a105764-ebd9-4c0a-b555-c5fd5ea8684d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.117281] env[62820]: DEBUG nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Received event network-vif-deleted-ed80036e-4305-4c40-823e-e6704c6b3aaf {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1676.117542] env[62820]: INFO nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Neutron deleted interface ed80036e-4305-4c40-823e-e6704c6b3aaf; detaching it from the instance and deleting it from the info cache [ 1676.117766] env[62820]: DEBUG nova.network.neutron [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.235299] env[62820]: DEBUG nova.compute.manager [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Received event network-vif-plugged-ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1676.235616] env[62820]: DEBUG oslo_concurrency.lockutils [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] Acquiring lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.235877] env[62820]: DEBUG oslo_concurrency.lockutils [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.236218] env[62820]: DEBUG oslo_concurrency.lockutils [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.236429] env[62820]: DEBUG nova.compute.manager 
[req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] No waiting events found dispatching network-vif-plugged-ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1676.236602] env[62820]: WARNING nova.compute.manager [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Received unexpected event network-vif-plugged-ab39f297-7fa5-430b-ba72-0857fd452878 for instance with vm_state building and task_state spawning. [ 1676.236781] env[62820]: DEBUG nova.compute.manager [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Received event network-changed-ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1676.236938] env[62820]: DEBUG nova.compute.manager [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Refreshing instance network info cache due to event network-changed-ab39f297-7fa5-430b-ba72-0857fd452878. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1676.237140] env[62820]: DEBUG oslo_concurrency.lockutils [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] Acquiring lock "refresh_cache-6da857ea-f213-4b17-9e9f-d74d1ea649c7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.237277] env[62820]: DEBUG oslo_concurrency.lockutils [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] Acquired lock "refresh_cache-6da857ea-f213-4b17-9e9f-d74d1ea649c7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.237433] env[62820]: DEBUG nova.network.neutron [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Refreshing network info cache for port ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1676.294367] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-6da857ea-f213-4b17-9e9f-d74d1ea649c7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.416062] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1676.474089] env[62820]: DEBUG oslo_vmware.api [None 
req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696035, 'name': Rename_Task, 'duration_secs': 0.16308} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.474382] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1676.474631] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27e015ed-4424-479d-a7a2-9b7b419cc65a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.481759] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1676.481759] env[62820]: value = "task-1696036" [ 1676.481759] env[62820]: _type = "Task" [ 1676.481759] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.493394] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.499495] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e0f830-d390-7290-4c3d-2bb184af8ac1, 'name': SearchDatastore_Task, 'duration_secs': 0.012164} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.499759] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.500106] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 8a105764-ebd9-4c0a-b555-c5fd5ea8684d/8a105764-ebd9-4c0a-b555-c5fd5ea8684d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1676.500395] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e366ba3-bcad-44cf-9c37-52c746f9de11 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.507074] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1676.507074] env[62820]: value = "task-1696037" [ 1676.507074] env[62820]: _type = "Task" [ 1676.507074] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.514542] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696037, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.621259] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f932cc2c-c296-4dbf-8603-4ccd76fec18f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.631280] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ad1239-bddc-4911-9fc5-4063f593ac9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.667750] env[62820]: DEBUG nova.compute.manager [req-41746751-c105-4e60-b924-2992ed09897f req-4a01843a-52c9-4089-8eaa-cef5487ff815 service nova] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Detach interface failed, port_id=ed80036e-4305-4c40-823e-e6704c6b3aaf, reason: Instance 53ba381a-9f81-4c37-8758-af56fc165dd7 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1676.778027] env[62820]: DEBUG nova.network.neutron [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1676.899157] env[62820]: DEBUG nova.network.neutron [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.921359] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1676.921578] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.047s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.921855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.930s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.923769] env[62820]: INFO nova.compute.claims [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1676.993740] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696036, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.017574] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696037, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.402122] env[62820]: DEBUG oslo_concurrency.lockutils [req-30831610-34f9-474d-bdf3-69757831ff79 req-b76fafeb-2c3d-4b75-8f4b-d9ef0f2af170 service nova] Releasing lock "refresh_cache-6da857ea-f213-4b17-9e9f-d74d1ea649c7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.402534] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-6da857ea-f213-4b17-9e9f-d74d1ea649c7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.402697] env[62820]: DEBUG nova.network.neutron [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1677.493390] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696036, 'name': PowerOnVM_Task, 'duration_secs': 0.571811} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.493526] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1677.493686] env[62820]: INFO nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Took 7.66 seconds to spawn the instance on the hypervisor. [ 1677.493914] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1677.494877] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7424cb8-b01a-4eef-b82c-5865ab284f0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.516847] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696037, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594683} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.517072] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 8a105764-ebd9-4c0a-b555-c5fd5ea8684d/8a105764-ebd9-4c0a-b555-c5fd5ea8684d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1677.517300] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1677.517555] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07099226-3f21-435f-8cf4-46892f57139b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.524164] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1677.524164] env[62820]: value = "task-1696038" [ 1677.524164] env[62820]: _type = "Task" [ 1677.524164] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.532335] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696038, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.942080] env[62820]: DEBUG nova.network.neutron [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1678.016502] env[62820]: INFO nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Took 21.95 seconds to build instance. [ 1678.040024] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696038, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072589} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.040024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1678.040024] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87868256-263a-4e11-a0dc-c68371980d3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.066904] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 8a105764-ebd9-4c0a-b555-c5fd5ea8684d/8a105764-ebd9-4c0a-b555-c5fd5ea8684d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1678.073183] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bbb5d80-2fbb-4d81-ae60-372407bc38c4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.094800] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1678.094800] env[62820]: value = "task-1696039" [ 1678.094800] env[62820]: _type = "Task" [ 1678.094800] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.107719] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696039, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.181891] env[62820]: DEBUG nova.network.neutron [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Updating instance_info_cache with network_info: [{"id": "ab39f297-7fa5-430b-ba72-0857fd452878", "address": "fa:16:3e:67:00:92", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab39f297-7f", "ovs_interfaceid": "ab39f297-7fa5-430b-ba72-0857fd452878", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.378838] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f54d7a1-6f1b-4b00-9918-104381c804d3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.386712] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88c1d17-ec77-4785-b698-0757e35c7962 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.417187] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5db6b0e-511b-4599-bf62-a2f655a3760f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.425238] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d652aa49-04d6-4aed-bc35-28c40c8d5c30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.439040] env[62820]: DEBUG nova.compute.provider_tree [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1678.519074] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.462s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.604845] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696039, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.684915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-6da857ea-f213-4b17-9e9f-d74d1ea649c7" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.685335] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Instance network_info: |[{"id": "ab39f297-7fa5-430b-ba72-0857fd452878", "address": "fa:16:3e:67:00:92", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab39f297-7f", "ovs_interfaceid": "ab39f297-7fa5-430b-ba72-0857fd452878", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1678.685845] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:00:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab39f297-7fa5-430b-ba72-0857fd452878', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1678.693305] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating folder: Project (4ca1b6f7bda3437eb67f5f765b5864a9). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1678.693586] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f31993b9-be5d-453b-ba4a-630dd07609c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.703847] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created folder: Project (4ca1b6f7bda3437eb67f5f765b5864a9) in parent group-v353379. [ 1678.704030] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating folder: Instances. Parent ref: group-v353619. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1678.704292] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccb46660-6cdb-4bf0-93bd-e7da8616906d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.713265] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created folder: Instances in parent group-v353619. [ 1678.713529] env[62820]: DEBUG oslo.service.loopingcall [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1678.713741] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1678.713979] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69690510-4837-4786-8d4b-95aef09f7118 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.735372] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1678.735372] env[62820]: value = "task-1696042" [ 1678.735372] env[62820]: _type = "Task" [ 1678.735372] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.743462] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696042, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.943040] env[62820]: DEBUG nova.scheduler.client.report [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1679.105432] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696039, 'name': ReconfigVM_Task, 'duration_secs': 0.567749} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.105651] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 8a105764-ebd9-4c0a-b555-c5fd5ea8684d/8a105764-ebd9-4c0a-b555-c5fd5ea8684d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1679.106394] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6aa41ba-050a-4045-9265-bee47a896e0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.112962] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1679.112962] env[62820]: value = "task-1696043" [ 1679.112962] env[62820]: _type = "Task" [ 1679.112962] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.120950] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696043, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.245317] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696042, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.449030] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.527s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.449713] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1679.452283] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.080s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.452499] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.454518] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.102s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.454707] env[62820]: DEBUG nova.objects.instance [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1679.478774] env[62820]: INFO nova.scheduler.client.report [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleted allocations for instance 5fbb6021-ca7d-4cce-90c9-113b7d833d49 [ 1679.626537] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696043, 'name': Rename_Task, 'duration_secs': 0.194028} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.627014] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1679.627403] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3efccbcb-3668-4d7b-b3e2-edab5afb0654 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.636240] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1679.636240] env[62820]: value = "task-1696044" [ 1679.636240] env[62820]: _type = "Task" [ 1679.636240] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.645289] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696044, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.747656] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696042, 'name': CreateVM_Task, 'duration_secs': 0.525279} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.747941] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1679.748814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.749105] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.749542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1679.749916] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf7a3a7-d39b-44d7-a40b-64ac958ee3d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.754719] env[62820]: DEBUG oslo_vmware.api [None 
req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1679.754719] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5220e290-7e47-4d88-1f24-4a21dd07dcf6" [ 1679.754719] env[62820]: _type = "Task" [ 1679.754719] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.763856] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5220e290-7e47-4d88-1f24-4a21dd07dcf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.930081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.930081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.930081] env[62820]: DEBUG nova.objects.instance [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'flavor' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1679.959869] env[62820]: DEBUG nova.compute.utils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1679.965430] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1679.965613] env[62820]: DEBUG nova.network.neutron [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1679.988101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3e250aef-7645-46fb-85a0-4e42a768c321 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "5fbb6021-ca7d-4cce-90c9-113b7d833d49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.074s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.022795] env[62820]: DEBUG nova.policy [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe4b58f7f5bd405db5c7f8b630032aa1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'accd5c1cf55248b780b00e33faf79fa0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1680.152173] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696044, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.268866] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5220e290-7e47-4d88-1f24-4a21dd07dcf6, 'name': SearchDatastore_Task, 'duration_secs': 0.012444} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.269291] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.269726] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1680.270188] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.270437] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.270718] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1680.271094] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74e0887a-6c14-4725-b208-ef5e94ae7d0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.281295] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1680.281488] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1680.282230] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a2eff1b-3c5a-461e-b554-3cdc19b3a494 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.287861] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1680.287861] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d93384-65ba-ef08-0fbb-5749797f64fa" [ 1680.287861] env[62820]: _type = "Task" [ 1680.287861] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.295651] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d93384-65ba-ef08-0fbb-5749797f64fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.300139] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.300397] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.300574] env[62820]: INFO nova.compute.manager [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Shelving [ 1680.436721] env[62820]: DEBUG nova.objects.instance [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'pci_requests' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1680.442759] env[62820]: DEBUG nova.network.neutron [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Successfully created port: a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1680.466544] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4db3aecc-8362-4b21-9b11-29c19a9da10e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" 
:: held 1.012s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.467726] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1680.476635] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.504s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.478070] env[62820]: INFO nova.compute.claims [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1680.535853] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b3b9c-080d-9e8a-9c3e-e2451f196a75/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1680.537123] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48f6e9f-5581-45f6-a1b3-bfdc982316a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.544844] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b3b9c-080d-9e8a-9c3e-e2451f196a75/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1680.545029] env[62820]: ERROR oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b3b9c-080d-9e8a-9c3e-e2451f196a75/disk-0.vmdk due to incomplete transfer. [ 1680.545267] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4296789d-be49-4eeb-a32a-e2b70a4bb68d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.554399] env[62820]: DEBUG oslo_vmware.rw_handles [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b3b9c-080d-9e8a-9c3e-e2451f196a75/disk-0.vmdk. 
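The rw_handles entries above show the export lease being checked, found "ready" despite an incomplete transfer, aborted, and the VMDK read handle closed. The sketch below captures that release decision in stand-alone form: the FakeLease class, the function name, and the byte counts are assumptions for illustration, not oslo.vmware's API.

import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger(__name__)

class FakeLease:
    """Stand-in for an NFC export lease; only tracks a state string."""
    def __init__(self):
        self.state = "ready"
    def complete(self):
        self.state = "done"
    def abort(self):
        self.state = "error"

def release_lease(lease, url, bytes_written, bytes_expected):
    """Complete the lease only if the full payload was transferred,
    otherwise abort it -- the decision the log above records."""
    LOG.debug("Getting lease state for %s.", url)
    if lease.state != "ready":
        LOG.debug("Lease for %s is in state: %s; nothing to do.", url, lease.state)
        return
    if bytes_written == bytes_expected:
        lease.complete()
        LOG.debug("Completed lease for %s.", url)
    else:
        LOG.error("Aborting lease for %s due to incomplete transfer.", url)
        lease.abort()

# Illustrative call: fewer bytes written than expected, so the lease is aborted.
release_lease(FakeLease(), "https://esx-host.example/nfc/disk-0.vmdk", 0, 21318656)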
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1680.554598] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Uploaded image b5637a72-690d-4ce3-99c3-dc6f93341a35 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1680.556810] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1680.557073] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-def22173-39f2-4b69-bced-e7565f23a3e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.565238] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1680.565238] env[62820]: value = "task-1696045" [ 1680.565238] env[62820]: _type = "Task" [ 1680.565238] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.573453] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696045, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.647641] env[62820]: DEBUG oslo_vmware.api [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696044, 'name': PowerOnVM_Task, 'duration_secs': 0.598482} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.647641] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1680.654601] env[62820]: INFO nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Took 8.32 seconds to spawn the instance on the hypervisor. 
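The Task entries in this section ("Waiting for the task ... to complete", "progress is 89%.", "completed successfully" with a duration_secs value) come from a simple poll loop around each vCenter task. Below is a minimal sketch of that polling shape, assuming a task object that exposes state and progress attributes; it is not the oslo.vmware implementation, just the pattern the log reflects.

import time

class StubTask:
    """Toy task that finishes after a few polls; stands in for a vCenter task."""
    def __init__(self, polls_to_finish=3):
        self._polls = polls_to_finish
        self.progress = 0
        self.state = "running"
    def refresh(self):
        self._polls -= 1
        self.progress = min(100, self.progress + 40)
        if self._polls <= 0:
            self.state = "success"

def wait_for_task(task, name, interval=0.5, timeout=60.0):
    """Poll a task until it succeeds, fails, or times out; return its duration."""
    start = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'name': {name!r}, 'duration_secs': {duration:.6f}}} "
                  "completed successfully.")
            return duration
        if task.state == "error":
            raise RuntimeError(f"Task {name} failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {name} did not finish within {timeout}s")
        print(f"Task: {{'name': {name!r}}} progress is {task.progress}%.")
        time.sleep(interval)

wait_for_task(StubTask(), "PowerOnVM_Task")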
[ 1680.654601] env[62820]: DEBUG nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1680.654601] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9413e253-9a1c-4071-977b-610a589b4882 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.802022] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d93384-65ba-ef08-0fbb-5749797f64fa, 'name': SearchDatastore_Task, 'duration_secs': 0.01069} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.802022] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a29c032-e2e0-4741-a2bd-e8a5ac7802f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.812559] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1680.812559] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525f6935-cd27-9654-0960-062c9aae542e" [ 1680.812559] env[62820]: _type = "Task" [ 1680.812559] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.823618] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525f6935-cd27-9654-0960-062c9aae542e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.941342] env[62820]: DEBUG nova.objects.base [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Object Instance<11843b38-3ce4-42a7-b855-a9d0b473e796> lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1680.941753] env[62820]: DEBUG nova.network.neutron [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1681.029204] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8a953869-8852-4504-b356-18e96fe86060 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.099s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.076773] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696045, 'name': Destroy_Task, 'duration_secs': 0.380957} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.077099] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroyed the VM [ 1681.077364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1681.077656] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9cd32203-2913-4fe2-897a-df2cb6cfc58c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.085931] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1681.085931] env[62820]: value = "task-1696046" [ 1681.085931] env[62820]: _type = "Task" [ 1681.085931] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.097827] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696046, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.175675] env[62820]: INFO nova.compute.manager [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Took 25.07 seconds to build instance. [ 1681.266273] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.266553] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.266763] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.266956] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.267147] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.271749] env[62820]: INFO nova.compute.manager [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Terminating instance [ 1681.313597] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1681.314541] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34bf4009-4bb4-43fe-9da2-cfef90f8ad04 {{(pid=62820) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.326910] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525f6935-cd27-9654-0960-062c9aae542e, 'name': SearchDatastore_Task, 'duration_secs': 0.010719} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.328295] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.328569] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6da857ea-f213-4b17-9e9f-d74d1ea649c7/6da857ea-f213-4b17-9e9f-d74d1ea649c7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1681.328884] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1681.328884] env[62820]: value = "task-1696047" [ 1681.328884] env[62820]: _type = "Task" [ 1681.328884] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.329088] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40eebfb4-241a-4d7d-a2af-ac5161eeb278 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.340934] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.342558] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1681.342558] env[62820]: value = "task-1696048" [ 1681.342558] env[62820]: _type = "Task" [ 1681.342558] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.351390] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696048, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.492777] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1681.519872] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1681.520158] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1681.520324] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1681.520526] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1681.520677] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1681.520836] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1681.521083] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1681.521248] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1681.521418] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1681.521598] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1681.521857] env[62820]: DEBUG nova.virt.hardware [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1681.522697] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97458e6d-7430-4037-b7e4-1f823317e216 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.537277] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ce4622-004b-4dc1-ad47-778495282dd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.599272] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696046, 'name': RemoveSnapshot_Task} progress is 58%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.680608] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6bfacaf-987f-45e4-9d42-41bbf8aa179d tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.583s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.776306] env[62820]: DEBUG nova.compute.manager [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Start destroying the instance on the hypervisor. 
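The nova.virt.hardware block above walks from flavor and image limits to "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", and a single (sockets=1, cores=1, threads=1) result. A simplified way to picture that step is to enumerate every (sockets, cores, threads) triple whose product equals the vCPU count and that fits within the per-dimension maxima; the sketch below does exactly that and is only an approximation of the real constraint logic, not Nova's code.

from collections import namedtuple
from itertools import product

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus
    and that stay within the per-dimension maxima."""
    topologies = []
    for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                           range(1, min(vcpus, max_cores) + 1),
                                           range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For a 1-vCPU flavor like m1.nano above, only one topology is possible.
print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]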
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1681.776510] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1681.777437] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520b9b26-35e6-4dc8-b91e-1065312c1da3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.787684] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1681.788084] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e12d0a98-cc34-4801-8a13-f40803a54405 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.796933] env[62820]: DEBUG oslo_vmware.api [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1681.796933] env[62820]: value = "task-1696049" [ 1681.796933] env[62820]: _type = "Task" [ 1681.796933] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.810570] env[62820]: DEBUG oslo_vmware.api [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696049, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.846307] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696047, 'name': PowerOffVM_Task, 'duration_secs': 0.223738} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.850306] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.852449] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40842b12-7dcc-41ce-a2f4-2f3cc93af210 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.855874] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67599d0a-6b4f-442c-be0a-9da289a1de35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.865485] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696048, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.882815] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0493cc-e499-47dc-97f4-da2b6646556a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.886317] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d04d64-5e4e-45a3-95a3-2f47c512c209 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.924746] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335575d6-36f3-412d-a2cd-c2dee6c2a1b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.936850] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1e0633-e27c-4d7a-89d4-b20808781eed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.952896] env[62820]: DEBUG nova.compute.provider_tree [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1682.097366] env[62820]: DEBUG oslo_vmware.api [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696046, 'name': RemoveSnapshot_Task, 'duration_secs': 0.590345} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.097657] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1682.097871] env[62820]: INFO nova.compute.manager [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 14.61 seconds to snapshot the instance on the hypervisor. [ 1682.101692] env[62820]: DEBUG nova.network.neutron [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Successfully updated port: a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1682.110142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.110142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.110142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.110142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.110142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.112516] env[62820]: INFO nova.compute.manager [None 
req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Terminating instance [ 1682.307721] env[62820]: DEBUG oslo_vmware.api [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696049, 'name': PowerOffVM_Task, 'duration_secs': 0.248032} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.308032] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1682.308204] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1682.308457] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a4d727a-f5c9-4b14-a2cd-2a8ce4db0335 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.318839] env[62820]: DEBUG nova.compute.manager [req-1b22d1bf-ec37-476e-81ee-9b8d568d5e65 req-071b7564-0885-4e1d-8d6a-9ee1d33b9a9d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Received event network-vif-plugged-a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1682.319066] env[62820]: DEBUG oslo_concurrency.lockutils [req-1b22d1bf-ec37-476e-81ee-9b8d568d5e65 req-071b7564-0885-4e1d-8d6a-9ee1d33b9a9d service nova] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.319284] env[62820]: DEBUG oslo_concurrency.lockutils [req-1b22d1bf-ec37-476e-81ee-9b8d568d5e65 req-071b7564-0885-4e1d-8d6a-9ee1d33b9a9d service nova] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.319422] env[62820]: DEBUG oslo_concurrency.lockutils [req-1b22d1bf-ec37-476e-81ee-9b8d568d5e65 req-071b7564-0885-4e1d-8d6a-9ee1d33b9a9d service nova] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.319583] env[62820]: DEBUG nova.compute.manager [req-1b22d1bf-ec37-476e-81ee-9b8d568d5e65 req-071b7564-0885-4e1d-8d6a-9ee1d33b9a9d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] No waiting events found dispatching network-vif-plugged-a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1682.319765] env[62820]: WARNING nova.compute.manager [req-1b22d1bf-ec37-476e-81ee-9b8d568d5e65 req-071b7564-0885-4e1d-8d6a-9ee1d33b9a9d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Received unexpected event network-vif-plugged-a3b6a7be-3800-4d75-9bf0-003542502fcb for instance with vm_state building and task_state spawning. [ 1682.356198] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55037} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.356535] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6da857ea-f213-4b17-9e9f-d74d1ea649c7/6da857ea-f213-4b17-9e9f-d74d1ea649c7.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1682.356757] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1682.357016] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1559a71-d8bc-4c43-91a8-534341b73937 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.364585] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1682.364585] env[62820]: value = "task-1696051" [ 1682.364585] env[62820]: _type = "Task" [ 1682.364585] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.374174] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696051, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.433185] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1682.433530] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9e0427b7-ac9c-4bc7-9433-d22eb4e95feb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.440713] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1682.440713] env[62820]: value = "task-1696052" [ 1682.440713] env[62820]: _type = "Task" [ 1682.440713] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.452812] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696052, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.456762] env[62820]: DEBUG nova.scheduler.client.report [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1682.518024] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1682.518296] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1682.518516] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleting the datastore file [datastore1] 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.518807] env[62820]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7dab6b7-3a0a-4f78-a554-9272819682fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.525939] env[62820]: DEBUG oslo_vmware.api [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1682.525939] env[62820]: value = "task-1696053" [ 1682.525939] env[62820]: _type = "Task" [ 1682.525939] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.534321] env[62820]: DEBUG oslo_vmware.api [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.608513] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.608677] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.608895] env[62820]: DEBUG nova.network.neutron [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1682.620746] env[62820]: DEBUG nova.compute.manager [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Start destroying the instance on the hypervisor. 
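The scheduler report client above logs "Inventory has not changed" for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a after comparing the freshly computed VCPU/MEMORY_MB/DISK_GB inventory against what was last reported, and only pushes an update when something differs. The helper below is a small sketch of that compare-before-update idea, assuming plain dicts keyed by resource class; the function name and the skip/update decision are illustrative, not the report client's API.

def diff_inventory(current, proposed):
    """Return the set of resource classes whose inventory fields differ."""
    changed = set()
    for rc in set(current) | set(proposed):
        if current.get(rc) != proposed.get(rc):
            changed.add(rc)
    return changed

recorded = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 180,
                "step_size": 1, "allocation_ratio": 1.0},
}
fresh = {rc: dict(fields) for rc, fields in recorded.items()}  # identical copy

changed = diff_inventory(recorded, fresh)
if changed:
    print("Updating placement inventory for:", sorted(changed))
else:
    print("Inventory has not changed; skipping the placement update")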
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1682.620972] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1682.622071] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256013f3-1a2f-420b-b474-4b5363a5bde3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.625516] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.625733] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.625972] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.626201] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.626387] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.628698] env[62820]: INFO nova.compute.manager [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Terminating instance [ 1682.637292] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Powering 
off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.637678] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb5533e6-025b-45ac-a0e2-efd7c267bddf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.648053] env[62820]: DEBUG oslo_vmware.api [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1682.648053] env[62820]: value = "task-1696054" [ 1682.648053] env[62820]: _type = "Task" [ 1682.648053] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.651042] env[62820]: DEBUG nova.compute.manager [None req-66eb1a16-a998-4299-9cf8-c265943da9a5 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Found 1 images (rotation: 2) {{(pid=62820) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4898}} [ 1682.662229] env[62820]: DEBUG oslo_vmware.api [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696054, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.878072] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068373} completed successfully. 
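"Found 1 images (rotation: 2)" above is backup rotation at work: after a new backup image is uploaded, the backups of the same type for the instance are counted and any beyond the rotation limit are deleted, oldest first. A minimal sketch of that pruning rule, with made-up image records rather than Glance calls, could look like this:

def rotate_backups(images, rotation):
    """Keep the newest `rotation` backups and return the excess to delete."""
    ordered = sorted(images, key=lambda img: img["created_at"], reverse=True)
    excess = ordered[rotation:]
    print(f"Found {len(ordered)} images (rotation: {rotation})")
    return excess

backups = [
    {"id": "b5637a72", "created_at": "2024-12-10T17:02:00Z"},
]
for image in rotate_backups(backups, rotation=2):
    print("Deleting excess backup", image["id"])   # nothing to delete here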
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.878072] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1682.879193] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca703146-e2ee-44eb-a56f-1a1d849f4ff7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.905536] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 6da857ea-f213-4b17-9e9f-d74d1ea649c7/6da857ea-f213-4b17-9e9f-d74d1ea649c7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1682.905945] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5443cec5-ddec-42d1-a388-5e9d6425db26 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.929323] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1682.929323] env[62820]: value = "task-1696055" [ 1682.929323] env[62820]: _type = "Task" [ 1682.929323] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.939637] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696055, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.951778] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696052, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.964066] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.964485] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1682.967845] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.957s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.968221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.970527] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.865s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.970736] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.972820] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.675s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.973063] env[62820]: DEBUG nova.objects.instance [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lazy-loading 'resources' on Instance uuid 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1683.007302] env[62820]: INFO nova.scheduler.client.report [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocations for instance 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea [ 1683.009458] env[62820]: INFO nova.scheduler.client.report [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Deleted allocations for instance b7c9f518-c908-42cc-ba09-59b0f8431f68 [ 1683.040284] env[62820]: DEBUG oslo_vmware.api [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386207} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.040574] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.040765] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.040945] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.041187] env[62820]: INFO nova.compute.manager [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1683.041469] env[62820]: DEBUG oslo.service.loopingcall [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.041718] env[62820]: DEBUG nova.compute.manager [-] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1683.041802] env[62820]: DEBUG nova.network.neutron [-] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.114038] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.114038] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.114038] env[62820]: DEBUG nova.objects.instance [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'flavor' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1683.132371] env[62820]: DEBUG nova.compute.manager [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1683.132584] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1683.133724] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba44bc2-7375-435f-97ef-20a09c268a9d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.144790] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1683.145253] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75dd2970-7883-4ecb-93c0-1f0b4505de2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.149046] env[62820]: DEBUG nova.network.neutron [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1683.158056] env[62820]: DEBUG oslo_vmware.api [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1683.158056] env[62820]: value = "task-1696056" [ 1683.158056] env[62820]: _type = "Task" [ 1683.158056] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.166136] env[62820]: DEBUG oslo_vmware.api [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696054, 'name': PowerOffVM_Task, 'duration_secs': 0.358931} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.166899] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.167098] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.167424] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3540849b-976a-42eb-b636-969ba3a91ab7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.175579] env[62820]: DEBUG oslo_vmware.api [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.278178] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.278502] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.278802] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleting the datastore file [datastore1] 0774673f-e7f2-46ce-b9ec-8fadb36ce192 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.279148] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe3b90ba-46ab-4072-8e57-9d16287fad6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.292586] env[62820]: DEBUG oslo_vmware.api [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1683.292586] env[62820]: value = "task-1696058" [ 1683.292586] env[62820]: _type = "Task" [ 1683.292586] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.307177] env[62820]: DEBUG oslo_vmware.api [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.328964] env[62820]: DEBUG nova.network.neutron [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updating instance_info_cache with network_info: [{"id": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "address": "fa:16:3e:88:f1:00", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b6a7be-38", "ovs_interfaceid": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.441862] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696055, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.456098] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696052, 'name': CreateSnapshot_Task, 'duration_secs': 1.000834} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.456400] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1683.457220] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ddb767-2abe-4f16-996f-5832fcb85f7c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.476618] env[62820]: DEBUG nova.compute.utils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1683.481390] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1683.481805] env[62820]: DEBUG nova.network.neutron [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1683.521181] env[62820]: DEBUG oslo_concurrency.lockutils [None req-76268726-e61b-4fa0-9db7-3bbeb388a584 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "78d9c7ad-af34-4e84-bd0c-d0bf287be0ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 15.194s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.521928] env[62820]: DEBUG oslo_concurrency.lockutils [None req-382574cf-cfbb-4311-bbee-f65f71753214 tempest-ServersTestBootFromVolume-1917126359 tempest-ServersTestBootFromVolume-1917126359-project-member] Lock "b7c9f518-c908-42cc-ba09-59b0f8431f68" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.580s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.526539] env[62820]: DEBUG nova.policy [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc838df5682041ed97e19ce34d9f14ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a07ed2a19149b3a58ee43a07e13bba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1683.676819] env[62820]: DEBUG oslo_vmware.api [None 
req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696056, 'name': PowerOffVM_Task, 'duration_secs': 0.302994} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.677121] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.677335] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.677957] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f642bd5a-2012-45c6-ac75-4f2ce189c61b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.719340] env[62820]: DEBUG nova.objects.instance [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'pci_requests' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1683.757743] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.757743] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.757743] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleting the datastore file [datastore1] 8a105764-ebd9-4c0a-b555-c5fd5ea8684d {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.758132] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-670fd02b-0a33-43f6-826a-f594f80fbeb6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.762430] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfe6739-43bf-4606-b99d-e4a42451cb6b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.771562] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8289d1db-ed5a-4323-8424-7c1aec9352dd {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.775458] env[62820]: DEBUG oslo_vmware.api [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for the task: (returnval){ [ 1683.775458] env[62820]: value = "task-1696060" [ 1683.775458] env[62820]: _type = "Task" [ 1683.775458] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.804107] env[62820]: DEBUG nova.network.neutron [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Successfully created port: 2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1683.809599] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba800ee-9522-4924-9d89-ac6bd4289bac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.815934] env[62820]: DEBUG oslo_vmware.api [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.825505] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b371e723-2a52-44a1-ba29-5deda9e7c153 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.829565] env[62820]: DEBUG oslo_vmware.api [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226699} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.829825] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.830070] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.830294] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.830486] env[62820]: INFO nova.compute.manager [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1683.830763] env[62820]: DEBUG oslo.service.loopingcall [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.831430] env[62820]: DEBUG nova.compute.manager [-] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1683.831580] env[62820]: DEBUG nova.network.neutron [-] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.833561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.833913] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Instance network_info: |[{"id": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "address": "fa:16:3e:88:f1:00", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b6a7be-38", "ovs_interfaceid": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1683.834167] env[62820]: DEBUG nova.network.neutron [-] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.845206] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:f1:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9a1e09ef-7c9c-45d9-9bf4-55b913524948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3b6a7be-3800-4d75-9bf0-003542502fcb', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1683.854149] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 
tempest-AttachVolumeNegativeTest-1173002150-project-member] Creating folder: Project (accd5c1cf55248b780b00e33faf79fa0). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1683.855218] env[62820]: DEBUG nova.compute.provider_tree [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.857682] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-354923e0-32fb-4a37-b65d-9baf7055269e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.872587] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Created folder: Project (accd5c1cf55248b780b00e33faf79fa0) in parent group-v353379. [ 1683.873104] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Creating folder: Instances. Parent ref: group-v353623. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1683.873104] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f90772d-41f7-467a-8469-b499151b1e32 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.884558] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Created folder: Instances in parent group-v353623. [ 1683.884816] env[62820]: DEBUG oslo.service.loopingcall [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.885039] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1683.885292] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99b07d00-bc3b-46fd-990b-4d97a5beb239 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.909233] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1683.909233] env[62820]: value = "task-1696063" [ 1683.909233] env[62820]: _type = "Task" [ 1683.909233] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.915945] env[62820]: DEBUG nova.compute.manager [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1683.919571] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c767a91-0a42-4c26-8e96-e924f5bc8694 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.922043] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696063, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.941700] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696055, 'name': ReconfigVM_Task, 'duration_secs': 0.535465} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.941990] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 6da857ea-f213-4b17-9e9f-d74d1ea649c7/6da857ea-f213-4b17-9e9f-d74d1ea649c7.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1683.942654] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9941a91-7c6b-488c-a3c4-45ce7960f7ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.951814] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1683.951814] env[62820]: value = "task-1696064" [ 1683.951814] env[62820]: _type = "Task" [ 1683.951814] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.962017] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696064, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.976764] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1683.977574] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-10e2841e-c60b-4544-b008-f8fdb2a479db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.981631] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1683.988134] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1683.988134] env[62820]: value = "task-1696065" [ 1683.988134] env[62820]: _type = "Task" [ 1683.988134] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.000416] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696065, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.223856] env[62820]: DEBUG nova.objects.base [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Object Instance<11843b38-3ce4-42a7-b855-a9d0b473e796> lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1684.224229] env[62820]: DEBUG nova.network.neutron [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1684.283061] env[62820]: DEBUG nova.policy [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1684.288193] env[62820]: DEBUG oslo_vmware.api [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Task: {'id': task-1696060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133504} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.288427] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1684.288614] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1684.288827] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1684.289022] env[62820]: INFO nova.compute.manager [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1684.289288] env[62820]: DEBUG oslo.service.loopingcall [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.289486] env[62820]: DEBUG nova.compute.manager [-] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1684.289582] env[62820]: DEBUG nova.network.neutron [-] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1684.358138] env[62820]: INFO nova.compute.manager [-] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Took 1.32 seconds to deallocate network for instance. [ 1684.364018] env[62820]: DEBUG nova.scheduler.client.report [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1684.420792] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696063, 'name': CreateVM_Task, 'duration_secs': 0.453462} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.421042] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1684.422292] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.422508] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.422935] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1684.423475] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03533c6c-28c6-4e58-a7bf-a71b8d22450d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.432116] env[62820]: INFO nova.compute.manager [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] instance snapshotting [ 1684.432116] env[62820]: DEBUG nova.objects.instance [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'flavor' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.432116] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1684.432116] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c7a790-8963-7fff-cfc9-546aed2d8cba" [ 1684.432116] env[62820]: _type = "Task" [ 1684.432116] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.442856] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c7a790-8963-7fff-cfc9-546aed2d8cba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.468778] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696064, 'name': Rename_Task, 'duration_secs': 0.21096} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.469134] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1684.469390] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1d84570-99e6-4893-91a5-5cd699d3a51a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.491064] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1684.491064] env[62820]: value = "task-1696066" [ 1684.491064] env[62820]: _type = "Task" [ 1684.491064] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.506935] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696065, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.510633] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696066, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.544578] env[62820]: DEBUG nova.compute.manager [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Received event network-changed-a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1684.544766] env[62820]: DEBUG nova.compute.manager [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Refreshing instance network info cache due to event network-changed-a3b6a7be-3800-4d75-9bf0-003542502fcb. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1684.544993] env[62820]: DEBUG oslo_concurrency.lockutils [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] Acquiring lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.545350] env[62820]: DEBUG oslo_concurrency.lockutils [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] Acquired lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.545541] env[62820]: DEBUG nova.network.neutron [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Refreshing network info cache for port a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1684.682847] env[62820]: DEBUG nova.compute.manager [req-25502949-0c25-40ef-b2ec-9659d8bd0754 req-71093bc6-6152-4982-9450-077da9373f98 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Received event network-vif-deleted-51b13f24-958b-455b-b09e-8a78b1c92de2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1684.682847] env[62820]: INFO nova.compute.manager [req-25502949-0c25-40ef-b2ec-9659d8bd0754 req-71093bc6-6152-4982-9450-077da9373f98 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Neutron deleted interface 51b13f24-958b-455b-b09e-8a78b1c92de2; detaching it from the instance and deleting it from the info cache [ 1684.682847] env[62820]: DEBUG nova.network.neutron [req-25502949-0c25-40ef-b2ec-9659d8bd0754 req-71093bc6-6152-4982-9450-077da9373f98 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.684880] env[62820]: DEBUG nova.network.neutron [-] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.687841] env[62820]: DEBUG nova.network.neutron [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Successfully created port: f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1684.872725] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.875896] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.877490] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.765s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.881645] env[62820]: INFO nova.compute.claims [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.913455] env[62820]: INFO nova.scheduler.client.report [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Deleted allocations for instance 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8 [ 1684.942288] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79783275-ab85-4b0b-b4b4-bc053f74ecc3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.953658] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c7a790-8963-7fff-cfc9-546aed2d8cba, 'name': SearchDatastore_Task, 'duration_secs': 0.018146} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.982335] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.985101] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1684.985101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.985101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.985101] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.985101] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a7c4b70-9e52-4ae6-a55a-b7a0c72935c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.987564] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c789db8-6444-4214-9f40-4862701432fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.001355] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1685.008879] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1685.008879] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1685.010523] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8355608d-0b79-4ca9-99ab-76b5ffd5dc9a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.019046] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696065, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.019319] env[62820]: DEBUG oslo_vmware.api [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696066, 'name': PowerOnVM_Task, 'duration_secs': 0.48141} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.019945] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1685.020192] env[62820]: INFO nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Took 10.11 seconds to spawn the instance on the hypervisor. [ 1685.020418] env[62820]: DEBUG nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1685.021223] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c74669-6c8a-4dac-96a5-f253ba768eb3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.026027] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1685.026027] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e24771-75a1-71de-920d-f6db103e32e2" [ 1685.026027] env[62820]: _type = "Task" [ 1685.026027] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.040970] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e24771-75a1-71de-920d-f6db103e32e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.061036] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1685.061289] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1685.061449] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1685.061630] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1685.061787] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1685.061976] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1685.062213] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1685.062375] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1685.062544] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1685.062716] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1685.062891] env[62820]: DEBUG nova.virt.hardware [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1685.064678] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca4bf25-007e-48ff-9563-1d8d6640edb5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.077416] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd33672-d14c-4018-a828-e30526beaa39 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.146306] env[62820]: DEBUG nova.network.neutron [-] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.162154] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.162154] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.162154] env[62820]: DEBUG nova.compute.manager [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1685.163392] env[62820]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a674bea1-0a17-404c-848e-b491d0bee2f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.172372] env[62820]: DEBUG nova.compute.manager [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1685.173039] env[62820]: DEBUG nova.objects.instance [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'flavor' on Instance uuid b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1685.186122] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f59e0f07-30cb-4e7e-8882-0c20a2f6a8fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.192767] env[62820]: INFO nova.compute.manager [-] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Took 1.36 seconds to deallocate network for instance. [ 1685.198170] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a983a3-c5cd-47c1-9370-2be60e6f145c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.246861] env[62820]: DEBUG nova.compute.manager [req-25502949-0c25-40ef-b2ec-9659d8bd0754 req-71093bc6-6152-4982-9450-077da9373f98 service nova] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Detach interface failed, port_id=51b13f24-958b-455b-b09e-8a78b1c92de2, reason: Instance 8a105764-ebd9-4c0a-b555-c5fd5ea8684d could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1685.421940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d7621251-9f42-4c2c-b800-cfe997943376 tempest-ServerMetadataTestJSON-404402466 tempest-ServerMetadataTestJSON-404402466-project-member] Lock "29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.781s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.502562] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696065, 'name': CloneVM_Task, 'duration_secs': 1.420682} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.504838] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Created linked-clone VM from snapshot [ 1685.504838] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc8f96e-52a3-4ff5-85a0-c697b208a0bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.512357] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1685.512653] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Uploading image 1ad372de-b4a3-441d-b9c8-61354d703fed {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1685.514577] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2e89fae4-1df4-4736-be2e-aa02876e183a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.522467] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1685.522467] env[62820]: value = "task-1696067" [ 1685.522467] env[62820]: _type = "Task" [ 1685.522467] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.545656] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696067, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.553731] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1685.553731] env[62820]: value = "vm-353626" [ 1685.553731] env[62820]: _type = "VirtualMachine" [ 1685.553731] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1685.554102] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e24771-75a1-71de-920d-f6db103e32e2, 'name': SearchDatastore_Task, 'duration_secs': 0.023817} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.554506] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7594eb53-5530-49e6-b228-e07eeb6d03c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.556487] env[62820]: INFO nova.compute.manager [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Took 28.24 seconds to build instance. [ 1685.561130] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf5d52b3-1d65-47e9-871e-305ac7d9e9e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.565869] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1685.565869] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526a0e32-37d0-8427-21c4-3fc108cc29eb" [ 1685.565869] env[62820]: _type = "Task" [ 1685.565869] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.571540] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lease: (returnval){ [ 1685.571540] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f2473c-03d2-b222-025f-17451f3e6153" [ 1685.571540] env[62820]: _type = "HttpNfcLease" [ 1685.571540] env[62820]: } obtained for exporting VM: (result){ [ 1685.571540] env[62820]: value = "vm-353626" [ 1685.571540] env[62820]: _type = "VirtualMachine" [ 1685.571540] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1685.571940] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the lease: (returnval){ [ 1685.571940] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f2473c-03d2-b222-025f-17451f3e6153" [ 1685.571940] env[62820]: _type = "HttpNfcLease" [ 1685.571940] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1685.580868] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526a0e32-37d0-8427-21c4-3fc108cc29eb, 'name': SearchDatastore_Task, 'duration_secs': 0.011009} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.581664] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.581855] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 03b0abc8-dd32-4cf9-8750-d64b8a66695e/03b0abc8-dd32-4cf9-8750-d64b8a66695e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1685.582398] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6eef00be-f872-4208-a698-28a00feb8e20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.586352] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1685.586352] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f2473c-03d2-b222-025f-17451f3e6153" [ 1685.586352] env[62820]: _type = "HttpNfcLease" [ 1685.586352] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1685.586994] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1685.586994] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f2473c-03d2-b222-025f-17451f3e6153" [ 1685.586994] env[62820]: _type = "HttpNfcLease" [ 1685.586994] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1685.587763] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76574d41-8366-4e81-92ec-4023a5c450aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.591728] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1685.591728] env[62820]: value = "task-1696069" [ 1685.591728] env[62820]: _type = "Task" [ 1685.591728] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.598875] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f68e2-594e-cbfb-3ccd-060918d5a9e7/disk-0.vmdk from lease info. 
{{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1685.599139] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f68e2-594e-cbfb-3ccd-060918d5a9e7/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1685.666529] env[62820]: INFO nova.compute.manager [-] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Took 1.38 seconds to deallocate network for instance. [ 1685.673819] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.711810] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-02a8ba4a-5eca-4294-9aa9-c8e350d5d774 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.722173] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.760519] env[62820]: DEBUG nova.network.neutron [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updated VIF entry in instance network info cache for port a3b6a7be-3800-4d75-9bf0-003542502fcb. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1685.761932] env[62820]: DEBUG nova.network.neutron [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updating instance_info_cache with network_info: [{"id": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "address": "fa:16:3e:88:f1:00", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b6a7be-38", "ovs_interfaceid": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.926155] env[62820]: DEBUG nova.network.neutron [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Successfully updated port: 2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1686.037492] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696067, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.059294] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24b8810c-0235-4032-929d-26ec337fe98a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.753s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.103348] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50514} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.106394] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 03b0abc8-dd32-4cf9-8750-d64b8a66695e/03b0abc8-dd32-4cf9-8750-d64b8a66695e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1686.106750] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1686.108415] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d96b9e82-788d-46d6-8aff-4e02cd3616ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.116766] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1686.116766] env[62820]: value = "task-1696070" [ 1686.116766] env[62820]: _type = "Task" [ 1686.116766] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.129026] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696070, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.184645] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.185631] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1686.185971] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-374aa645-52b6-44f7-b3d9-a0ac246ce2ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.204130] env[62820]: DEBUG oslo_vmware.api [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1686.204130] env[62820]: value = "task-1696071" [ 1686.204130] env[62820]: _type = "Task" [ 1686.204130] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.219553] env[62820]: DEBUG oslo_vmware.api [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696071, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.264394] env[62820]: DEBUG oslo_concurrency.lockutils [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] Releasing lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.264932] env[62820]: DEBUG nova.compute.manager [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Received event network-vif-deleted-40484755-60da-4d73-a825-a5d4eedee87b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1686.265646] env[62820]: DEBUG nova.compute.manager [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Received event network-vif-deleted-364be5e6-c3f1-45ae-97ca-f068e0cfeab6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1686.267805] env[62820]: INFO nova.compute.manager [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Neutron deleted interface 364be5e6-c3f1-45ae-97ca-f068e0cfeab6; detaching it from the instance and deleting it from the info cache [ 1686.268856] env[62820]: DEBUG nova.network.neutron [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.272922] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ba0cfe-c65c-4d84-a26c-eb6ce0372c2e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.287439] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876d99ca-9bb3-4066-ade5-a7017b0108ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.345131] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fefdbdc-ad1e-4f64-9a1c-22cc26289733 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.360146] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f65543-8705-43f0-892a-acb7d77d7836 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.392912] env[62820]: DEBUG nova.compute.provider_tree [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Inventory has not changed in ProviderTree for provider: 
8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1686.433113] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.433206] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.433299] env[62820]: DEBUG nova.network.neutron [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1686.534768] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696067, 'name': CreateSnapshot_Task, 'duration_secs': 0.739624} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.535149] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1686.535977] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd1ea4b-bfa7-4cdf-84ae-0410f777b129 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.629817] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696070, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090404} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.629817] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1686.629817] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1124a41c-a586-4802-9936-b69265982170 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.655010] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 03b0abc8-dd32-4cf9-8750-d64b8a66695e/03b0abc8-dd32-4cf9-8750-d64b8a66695e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1686.656080] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70f81ede-a4d6-4351-a5ec-d371261311dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.684381] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1686.684381] env[62820]: value = "task-1696072" [ 1686.684381] env[62820]: _type = "Task" [ 1686.684381] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.705423] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696072, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.723453] env[62820]: DEBUG oslo_vmware.api [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696071, 'name': PowerOffVM_Task, 'duration_secs': 0.335284} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.723607] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1686.724379] env[62820]: DEBUG nova.compute.manager [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1686.724831] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21c60bb-d5f8-48d0-9caa-a044eb814a8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.764977] env[62820]: DEBUG nova.network.neutron [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Successfully updated port: f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1686.776338] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18dd7219-0b79-478b-9db6-0b680d486c9a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.791963] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb49d713-d6ec-413e-855a-8df1384b876f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.842389] env[62820]: DEBUG nova.compute.manager [req-927a1d00-a83e-47b0-a2ca-468409d75928 req-eaad8fa4-2070-473c-8df6-de18664bac0d service nova] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Detach interface failed, port_id=364be5e6-c3f1-45ae-97ca-f068e0cfeab6, reason: Instance 0774673f-e7f2-46ce-b9ec-8fadb36ce192 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1686.896717] env[62820]: DEBUG nova.scheduler.client.report [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1687.056497] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1687.057224] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ce87a271-8427-449a-9f0f-77790ec17b30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.067306] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1687.067306] env[62820]: value = "task-1696073" [ 1687.067306] env[62820]: _type = "Task" [ 1687.067306] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.076876] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696073, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.196213] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.236992] env[62820]: DEBUG nova.network.neutron [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1687.244034] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c15fbc97-79cf-4e02-8a64-26b58d9cddec tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.082s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.269023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.269362] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.269728] env[62820]: DEBUG nova.network.neutron [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1687.326490] env[62820]: WARNING nova.network.neutron [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] 26851e2e-dece-4dce-bec8-e64227003b80 already exists in list: networks containing: ['26851e2e-dece-4dce-bec8-e64227003b80']. ignoring it [ 1687.403061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.403759] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1687.409437] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.616s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.410161] env[62820]: DEBUG nova.objects.instance [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'resources' on Instance uuid 53ba381a-9f81-4c37-8758-af56fc165dd7 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.580360] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696073, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.606561] env[62820]: DEBUG nova.network.neutron [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance_info_cache with network_info: [{"id": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "address": "fa:16:3e:a8:e9:1b", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8ab641-c9", "ovs_interfaceid": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.642696] env[62820]: DEBUG nova.compute.manager [req-e91718c3-cb8d-4eff-98c8-53973183aad9 req-8861fce3-facf-4101-bf38-bfed10f26ad8 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-vif-plugged-f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1687.643589] env[62820]: DEBUG oslo_concurrency.lockutils [req-e91718c3-cb8d-4eff-98c8-53973183aad9 req-8861fce3-facf-4101-bf38-bfed10f26ad8 service nova] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.644132] 
env[62820]: DEBUG oslo_concurrency.lockutils [req-e91718c3-cb8d-4eff-98c8-53973183aad9 req-8861fce3-facf-4101-bf38-bfed10f26ad8 service nova] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.644618] env[62820]: DEBUG oslo_concurrency.lockutils [req-e91718c3-cb8d-4eff-98c8-53973183aad9 req-8861fce3-facf-4101-bf38-bfed10f26ad8 service nova] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.644902] env[62820]: DEBUG nova.compute.manager [req-e91718c3-cb8d-4eff-98c8-53973183aad9 req-8861fce3-facf-4101-bf38-bfed10f26ad8 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] No waiting events found dispatching network-vif-plugged-f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1687.645225] env[62820]: WARNING nova.compute.manager [req-e91718c3-cb8d-4eff-98c8-53973183aad9 req-8861fce3-facf-4101-bf38-bfed10f26ad8 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received unexpected event network-vif-plugged-f0084819-f55d-4bd8-a480-72eab0bdd647 for instance with vm_state active and task_state None. [ 1687.659993] env[62820]: DEBUG nova.compute.manager [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Received event network-vif-plugged-2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1687.660790] env[62820]: DEBUG oslo_concurrency.lockutils [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] Acquiring lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.661193] env[62820]: DEBUG oslo_concurrency.lockutils [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] Lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.662219] env[62820]: DEBUG oslo_concurrency.lockutils [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] Lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.662630] env[62820]: DEBUG nova.compute.manager [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] No waiting events found dispatching network-vif-plugged-2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1687.663054] env[62820]: WARNING nova.compute.manager 
[req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Received unexpected event network-vif-plugged-2e8ab641-c961-452e-a6eb-d760374ac2b2 for instance with vm_state building and task_state spawning. [ 1687.663633] env[62820]: DEBUG nova.compute.manager [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Received event network-changed-2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1687.664910] env[62820]: DEBUG nova.compute.manager [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Refreshing instance network info cache due to event network-changed-2e8ab641-c961-452e-a6eb-d760374ac2b2. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1687.664910] env[62820]: DEBUG oslo_concurrency.lockutils [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] Acquiring lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.701351] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696072, 'name': ReconfigVM_Task, 'duration_secs': 0.662874} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.701351] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 03b0abc8-dd32-4cf9-8750-d64b8a66695e/03b0abc8-dd32-4cf9-8750-d64b8a66695e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1687.701351] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad51c3a1-b3d8-4d48-91e6-9dcd103adba6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.710025] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1687.710025] env[62820]: value = "task-1696074" [ 1687.710025] env[62820]: _type = "Task" [ 1687.710025] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.733419] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696074, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.772499] env[62820]: DEBUG nova.network.neutron [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f0084819-f55d-4bd8-a480-72eab0bdd647", "address": "fa:16:3e:ff:4b:7d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0084819-f5", "ovs_interfaceid": "f0084819-f55d-4bd8-a480-72eab0bdd647", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.918756] env[62820]: DEBUG nova.compute.utils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1687.921242] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Allocating IP information in the 
background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1687.921919] env[62820]: DEBUG nova.network.neutron [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1687.997112] env[62820]: DEBUG nova.policy [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45d374be267c4206b07b238d3ad26000', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '001a6dcf0a33474992d8d7c01bc2022d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1688.083090] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696073, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.111643] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.112615] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Instance network_info: |[{"id": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "address": "fa:16:3e:a8:e9:1b", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8ab641-c9", "ovs_interfaceid": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1688.112615] env[62820]: DEBUG oslo_concurrency.lockutils 
[req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] Acquired lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.112615] env[62820]: DEBUG nova.network.neutron [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Refreshing network info cache for port 2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1688.115542] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:e9:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e8ab641-c961-452e-a6eb-d760374ac2b2', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1688.123379] env[62820]: DEBUG oslo.service.loopingcall [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1688.126751] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1688.127021] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d46a8a6-c5c9-4a9a-a697-a5f177c9a63a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.153781] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1688.153781] env[62820]: value = "task-1696075" [ 1688.153781] env[62820]: _type = "Task" [ 1688.153781] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.168366] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696075, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.222033] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696074, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.278384] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.280015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.280232] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.281909] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cd4de5-ecf7-4f16-9f25-63aad66305c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.302915] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1688.303313] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1688.303389] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1688.303848] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1688.304120] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1688.304312] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1688.304996] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1688.304996] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1688.304996] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1688.304996] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1688.306601] env[62820]: DEBUG nova.virt.hardware [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1688.313182] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfiguring VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1688.314843] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3c9b0a2-7aad-478c-81cf-d3b640e7a25f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.333612] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e29e81-c717-4a17-a23f-0c2e468ec6eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.347224] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453d21a0-9137-4e1d-8f92-5a8ca2ae62d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.352303] env[62820]: DEBUG oslo_vmware.api [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc 
tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1688.352303] env[62820]: value = "task-1696076" [ 1688.352303] env[62820]: _type = "Task" [ 1688.352303] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.388978] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0301628-6692-43e6-8094-e77868729f5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.395120] env[62820]: DEBUG oslo_vmware.api [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696076, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.402096] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07753e48-a544-4492-86c3-78ae104c0079 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.421856] env[62820]: DEBUG nova.compute.provider_tree [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.425347] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1688.586079] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696073, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.628447] env[62820]: DEBUG nova.network.neutron [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Successfully created port: d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1688.665900] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696075, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.723100] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696074, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.873730] env[62820]: DEBUG oslo_vmware.api [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696076, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.926896] env[62820]: DEBUG nova.scheduler.client.report [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1688.960883] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.961148] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.036841] env[62820]: DEBUG nova.objects.instance [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'flavor' on Instance uuid b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.059015] env[62820]: DEBUG nova.network.neutron [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updated VIF entry in instance network info cache for port 2e8ab641-c961-452e-a6eb-d760374ac2b2. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1689.059552] env[62820]: DEBUG nova.network.neutron [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance_info_cache with network_info: [{"id": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "address": "fa:16:3e:a8:e9:1b", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8ab641-c9", "ovs_interfaceid": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.080985] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696073, 'name': CloneVM_Task, 'duration_secs': 1.770447} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.081631] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created linked-clone VM from snapshot [ 1689.082189] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf11f746-c46a-4e74-9657-14353920ec14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.092511] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Uploading image d1f53c66-1541-4919-9985-f1fa793ea874 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1689.123884] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1689.123884] env[62820]: value = "vm-353628" [ 1689.123884] env[62820]: _type = "VirtualMachine" [ 1689.123884] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1689.124199] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-767d9af7-9223-46c4-b80d-52f5c3013ad3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.138362] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease: (returnval){ [ 1689.138362] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cba224-ac51-9b96-738d-e2d09b01c64b" [ 1689.138362] env[62820]: _type = "HttpNfcLease" [ 1689.138362] env[62820]: } obtained for exporting VM: (result){ [ 1689.138362] env[62820]: value = "vm-353628" [ 1689.138362] env[62820]: _type = "VirtualMachine" [ 1689.138362] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1689.138731] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the lease: (returnval){ [ 1689.138731] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cba224-ac51-9b96-738d-e2d09b01c64b" [ 1689.138731] env[62820]: _type = "HttpNfcLease" [ 1689.138731] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1689.149824] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1689.149824] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cba224-ac51-9b96-738d-e2d09b01c64b" [ 1689.149824] env[62820]: _type = "HttpNfcLease" [ 1689.149824] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1689.168160] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696075, 'name': CreateVM_Task, 'duration_secs': 0.547278} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.168419] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1689.169309] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.169515] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.169921] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1689.170323] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a490570b-82a4-4ee7-bdda-0617acfe4ca4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.175998] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1689.175998] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529c679c-6971-43fb-72bd-246b94f208bf" [ 1689.175998] env[62820]: _type = "Task" [ 1689.175998] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.185519] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529c679c-6971-43fb-72bd-246b94f208bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.223786] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696074, 'name': Rename_Task, 'duration_secs': 1.242221} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.223786] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1689.223786] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4181816c-23a6-4c6a-82a7-d8cd0ca78711 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.233032] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1689.233032] env[62820]: value = "task-1696078" [ 1689.233032] env[62820]: _type = "Task" [ 1689.233032] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.242485] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696078, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.366703] env[62820]: DEBUG oslo_vmware.api [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696076, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.438104] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.029s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.441298] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1689.443937] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.568s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.444500] env[62820]: DEBUG nova.objects.instance [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lazy-loading 'resources' on Instance uuid 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.464542] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1689.470023] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1689.470272] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1689.470491] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1689.470712] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1689.470864] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1689.471276] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1689.471652] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1689.471932] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1689.472261] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1689.472553] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1689.472850] env[62820]: DEBUG nova.virt.hardware [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1689.474323] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34317f0d-3134-44e3-8e03-d3054f61caa6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.477827] env[62820]: INFO nova.scheduler.client.report [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocations for instance 53ba381a-9f81-4c37-8758-af56fc165dd7 [ 1689.489316] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ec3784-4e4b-4875-b90e-b208a3fcdd6d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.542247] env[62820]: DEBUG oslo_concurrency.lockutils [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.542428] env[62820]: DEBUG oslo_concurrency.lockutils [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 
tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.542602] env[62820]: DEBUG nova.network.neutron [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1689.542779] env[62820]: DEBUG nova.objects.instance [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'info_cache' on Instance uuid b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.562567] env[62820]: DEBUG oslo_concurrency.lockutils [req-169add14-750a-4727-83bd-721aaafd8876 req-13e2080d-5a30-4292-88c9-5ea4d57711d7 service nova] Releasing lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.650160] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1689.650160] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cba224-ac51-9b96-738d-e2d09b01c64b" [ 1689.650160] env[62820]: _type = "HttpNfcLease" [ 1689.650160] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1689.650160] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1689.650160] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cba224-ac51-9b96-738d-e2d09b01c64b" [ 1689.650160] env[62820]: _type = "HttpNfcLease" [ 1689.650160] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1689.650800] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4b61b0-ae15-47ed-bb57-808d82dcf4a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.660226] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd061-f5c3-caf8-2520-57c9ae9d3879/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1689.660426] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd061-f5c3-caf8-2520-57c9ae9d3879/disk-0.vmdk for reading. 
{{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1689.747120] env[62820]: DEBUG nova.compute.manager [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-changed-f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1689.747269] env[62820]: DEBUG nova.compute.manager [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing instance network info cache due to event network-changed-f0084819-f55d-4bd8-a480-72eab0bdd647. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1689.747488] env[62820]: DEBUG oslo_concurrency.lockutils [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.747647] env[62820]: DEBUG oslo_concurrency.lockutils [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.747793] env[62820]: DEBUG nova.network.neutron [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing network info cache for port f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1689.760707] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696078, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.764602] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529c679c-6971-43fb-72bd-246b94f208bf, 'name': SearchDatastore_Task, 'duration_secs': 0.015547} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.765660] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.765972] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1689.766292] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.766546] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.766663] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1689.767308] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7209537-e358-454d-9a0e-a70e5a0df541 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.780524] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1689.780809] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1689.781702] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31c8ffd1-50f6-48c5-90d6-71bd053e3327 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.789673] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1689.789673] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e0ab16-9870-21cd-5d0b-bba03d89bd1a" [ 1689.789673] env[62820]: _type = "Task" [ 1689.789673] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.799777] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e0ab16-9870-21cd-5d0b-bba03d89bd1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.814805] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4341bde5-ac84-428b-a662-1ef8cca055e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.867769] env[62820]: DEBUG oslo_vmware.api [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696076, 'name': ReconfigVM_Task, 'duration_secs': 1.23959} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.868596] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.870573] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfigured VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1689.996218] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1b3f0612-3510-4a07-a428-fda9f84a4a73 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "53ba381a-9f81-4c37-8758-af56fc165dd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.352s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.019533] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.046443] env[62820]: DEBUG nova.objects.base [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1690.256887] env[62820]: DEBUG oslo_vmware.api [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696078, 'name': PowerOnVM_Task, 'duration_secs': 0.884915} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.260840] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1690.261600] env[62820]: INFO nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 1690.262413] env[62820]: DEBUG nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1690.264231] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1508abf0-e896-4026-aa38-e8f72c17cac2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.309429] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e0ab16-9870-21cd-5d0b-bba03d89bd1a, 'name': SearchDatastore_Task, 'duration_secs': 0.020154} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.312473] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02b0de7f-cbc7-4ae0-b3a2-3c5c6f9c555f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.330145] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1690.330145] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d76d81-fafd-8ce7-d42f-0c5c42cb5913" [ 1690.330145] env[62820]: _type = "Task" [ 1690.330145] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.352796] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d76d81-fafd-8ce7-d42f-0c5c42cb5913, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.374195] env[62820]: DEBUG oslo_concurrency.lockutils [None req-33c8d1ca-6e49-4da5-95fc-77af766d0abc tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.262s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.383072] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb5439e-312f-4353-946f-d71a965e89ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.401951] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdda7d5c-1021-4829-8818-f2aaf14b9ddf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.439758] env[62820]: DEBUG nova.network.neutron [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Successfully updated port: d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1690.444571] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ad3c94-2714-4689-8af9-a21259b3c107 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.456371] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe0d072-f387-4be1-9b92-bc8e4cdc8ff7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.479063] env[62820]: DEBUG nova.compute.provider_tree [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.525381] env[62820]: DEBUG nova.compute.manager [req-3b381499-a25d-4bbf-8bb6-ae948970a8ba req-3c2b38c5-21d5-42e0-8058-7d8de1eeb453 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Received event network-vif-plugged-d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1690.525381] env[62820]: DEBUG oslo_concurrency.lockutils [req-3b381499-a25d-4bbf-8bb6-ae948970a8ba req-3c2b38c5-21d5-42e0-8058-7d8de1eeb453 service nova] Acquiring lock "76bd4a09-300d-460e-8442-21b4f6567698-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.525381] env[62820]: DEBUG oslo_concurrency.lockutils [req-3b381499-a25d-4bbf-8bb6-ae948970a8ba req-3c2b38c5-21d5-42e0-8058-7d8de1eeb453 service nova] Lock "76bd4a09-300d-460e-8442-21b4f6567698-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.525709] env[62820]: DEBUG oslo_concurrency.lockutils [req-3b381499-a25d-4bbf-8bb6-ae948970a8ba req-3c2b38c5-21d5-42e0-8058-7d8de1eeb453 service nova] Lock "76bd4a09-300d-460e-8442-21b4f6567698-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.525709] env[62820]: DEBUG nova.compute.manager [req-3b381499-a25d-4bbf-8bb6-ae948970a8ba req-3c2b38c5-21d5-42e0-8058-7d8de1eeb453 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] No waiting events found dispatching network-vif-plugged-d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1690.525870] env[62820]: WARNING nova.compute.manager [req-3b381499-a25d-4bbf-8bb6-ae948970a8ba req-3c2b38c5-21d5-42e0-8058-7d8de1eeb453 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Received unexpected event network-vif-plugged-d46278a6-5202-4c8b-890f-41286051b6d4 for instance with vm_state building and task_state spawning. [ 1690.763091] env[62820]: DEBUG nova.network.neutron [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updated VIF entry in instance network info cache for port f0084819-f55d-4bd8-a480-72eab0bdd647. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1690.763784] env[62820]: DEBUG nova.network.neutron [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f0084819-f55d-4bd8-a480-72eab0bdd647", "address": "fa:16:3e:ff:4b:7d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0084819-f5", "ovs_interfaceid": "f0084819-f55d-4bd8-a480-72eab0bdd647", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.796293] env[62820]: INFO nova.compute.manager [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Took 31.84 seconds to build instance. [ 1690.844577] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d76d81-fafd-8ce7-d42f-0c5c42cb5913, 'name': SearchDatastore_Task, 'duration_secs': 0.02214} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.847022] env[62820]: DEBUG nova.network.neutron [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.848651] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] 
devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.852446] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274/860637a2-8c59-42af-a9f5-4e80c5466274.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1690.853120] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12e19eb6-f424-4ed5-a316-6f005fffd499 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.863840] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1690.863840] env[62820]: value = "task-1696079" [ 1690.863840] env[62820]: _type = "Task" [ 1690.863840] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.879811] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.949622] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.949821] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquired lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.949979] env[62820]: DEBUG nova.network.neutron [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1690.985365] env[62820]: DEBUG nova.scheduler.client.report [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1691.277039] env[62820]: DEBUG oslo_concurrency.lockutils [req-58dd8b3d-25c6-48eb-aa97-e105b04f5d52 req-376cf1df-b1ea-452c-9a13-43d87c2249c5 service nova] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.300404] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0ba8cb89-1338-460b-a389-2c479f0ee706 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.045s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.354796] env[62820]: DEBUG oslo_concurrency.lockutils [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.379361] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696079, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.494094] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.498059] env[62820]: DEBUG nova.network.neutron [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1691.499545] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.777s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.500018] env[62820]: DEBUG nova.objects.instance [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lazy-loading 'resources' on Instance uuid 0774673f-e7f2-46ce-b9ec-8fadb36ce192 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.525826] env[62820]: INFO nova.scheduler.client.report [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleted allocations for instance 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff [ 1691.750554] env[62820]: DEBUG nova.network.neutron [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Updating instance_info_cache with network_info: [{"id": "d46278a6-5202-4c8b-890f-41286051b6d4", "address": "fa:16:3e:8a:d4:a8", "network": {"id": "00ede8b9-7d74-4e3c-a4ba-c2baa25a2890", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-573826592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "001a6dcf0a33474992d8d7c01bc2022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46278a6-52", "ovs_interfaceid": "d46278a6-5202-4c8b-890f-41286051b6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.875311] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712132} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.876652] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274/860637a2-8c59-42af-a9f5-4e80c5466274.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1691.878919] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1691.878919] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed74cdd6-1042-4db1-ab58-91905f35b5ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.888128] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1691.888128] env[62820]: value = "task-1696080" [ 1691.888128] env[62820]: _type = "Task" [ 1691.888128] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.902492] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696080, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.037355] env[62820]: DEBUG oslo_concurrency.lockutils [None req-18ac90f7-3e3f-4156-97b6-a1c2027aa596 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.771s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.254829] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Releasing lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.256026] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Instance network_info: |[{"id": "d46278a6-5202-4c8b-890f-41286051b6d4", "address": "fa:16:3e:8a:d4:a8", "network": {"id": "00ede8b9-7d74-4e3c-a4ba-c2baa25a2890", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-573826592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "001a6dcf0a33474992d8d7c01bc2022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46278a6-52", "ovs_interfaceid": "d46278a6-5202-4c8b-890f-41286051b6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1692.256026] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:d4:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd986680e-ad16-45b1-bf6d-cd2fe661679f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd46278a6-5202-4c8b-890f-41286051b6d4', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1692.274331] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Creating folder: Project (001a6dcf0a33474992d8d7c01bc2022d). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.279202] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0219c41e-d6d7-4d25-9a33-d21cfacf8abb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.307888] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Created folder: Project (001a6dcf0a33474992d8d7c01bc2022d) in parent group-v353379. [ 1692.311023] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Creating folder: Instances. Parent ref: group-v353630. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.311023] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3df8a4c-a1bb-4b43-91d6-637241154298 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.344671] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Created folder: Instances in parent group-v353630. [ 1692.344671] env[62820]: DEBUG oslo.service.loopingcall [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.344906] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1692.346826] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9a791e7-2475-4e0f-bb70-45d2c1f5fb38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.391984] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1692.392371] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-909118c4-7721-4a11-8923-6b9bdf283521 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.400820] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1692.400820] env[62820]: value = "task-1696083" [ 1692.400820] env[62820]: _type = "Task" [ 1692.400820] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.404309] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696080, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125944} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.409594] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1692.409966] env[62820]: DEBUG oslo_vmware.api [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1692.409966] env[62820]: value = "task-1696084" [ 1692.409966] env[62820]: _type = "Task" [ 1692.409966] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.410658] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c7401d-34e3-48b3-b4a7-528f4a9188d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.414147] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9e4f96-8fd1-4bc0-b9f9-8e54981ad50f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.424496] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696083, 'name': CreateVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.428377] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8b85df-a8b4-4312-b4fb-7e2eb499af37 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.446776] env[62820]: DEBUG oslo_vmware.api [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696084, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.457050] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274/860637a2-8c59-42af-a9f5-4e80c5466274.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1692.457997] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66e15920-60f3-4024-b04a-e94da2017bab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.507527] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e68a5f5-b6ee-4b5f-af4c-6d02fbf5977b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.514582] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1692.514582] env[62820]: value = "task-1696085" [ 1692.514582] env[62820]: _type = "Task" [ 1692.514582] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.522125] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e107167-b883-4045-8248-8ffda60faed3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.535756] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696085, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.546557] env[62820]: DEBUG nova.compute.provider_tree [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.917140] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696083, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.929540] env[62820]: DEBUG oslo_vmware.api [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696084, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.027915] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696085, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.050854] env[62820]: DEBUG nova.scheduler.client.report [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1693.422533] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696083, 'name': CreateVM_Task, 'duration_secs': 0.5846} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.428723] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1693.429540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.429736] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.430108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1693.430902] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efe24730-43a8-447e-86fe-24d1c2954ba8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.440429] env[62820]: DEBUG oslo_vmware.api [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696084, 'name': PowerOnVM_Task, 'duration_secs': 0.59501} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.441180] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1693.441407] env[62820]: DEBUG nova.compute.manager [None req-281380ed-ebeb-48fb-a91f-d7d1e5aa5562 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1693.442359] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f688b8-0da1-4c7d-aff4-90935865dbaa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.446836] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1693.446836] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522961a9-3d7d-882f-c663-1f3a4121a618" [ 1693.446836] env[62820]: _type = "Task" [ 1693.446836] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.460451] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522961a9-3d7d-882f-c663-1f3a4121a618, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.528776] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696085, 'name': ReconfigVM_Task, 'duration_secs': 0.542685} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.529138] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274/860637a2-8c59-42af-a9f5-4e80c5466274.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1693.529816] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0dea59d-64da-4f53-a88b-c9455b98469b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.540154] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1693.540154] env[62820]: value = "task-1696086" [ 1693.540154] env[62820]: _type = "Task" [ 1693.540154] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.554943] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696086, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.555755] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.558860] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.374s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.558860] env[62820]: DEBUG nova.objects.instance [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lazy-loading 'resources' on Instance uuid 8a105764-ebd9-4c0a-b555-c5fd5ea8684d {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.586665] env[62820]: INFO nova.scheduler.client.report [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted allocations for instance 0774673f-e7f2-46ce-b9ec-8fadb36ce192 [ 1693.963932] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522961a9-3d7d-882f-c663-1f3a4121a618, 'name': SearchDatastore_Task, 
'duration_secs': 0.018053} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.966223] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.966519] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1693.966774] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.966929] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.967122] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1693.967574] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9384178e-deeb-4016-bba2-4dd937849255 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.985532] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1693.985829] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1693.986790] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ebd2a8-34b3-43ec-95a5-b98e5519cc08 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.995339] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1693.995339] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52480cc1-bade-5573-9414-d3202de29622" [ 1693.995339] env[62820]: _type = "Task" [ 1693.995339] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.006214] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52480cc1-bade-5573-9414-d3202de29622, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.053435] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696086, 'name': Rename_Task, 'duration_secs': 0.296282} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.053966] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1694.054443] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d18090cd-7891-447f-8c1d-a65581af3fa3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.066828] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1694.066828] env[62820]: value = "task-1696087" [ 1694.066828] env[62820]: _type = "Task" [ 1694.066828] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.083026] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696087, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.095456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d9875b57-0785-4d4d-b569-77434561f6a9 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "0774673f-e7f2-46ce-b9ec-8fadb36ce192" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.986s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.320966] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c14e6e-25cf-410c-9a63-cabd75c2d005 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.330143] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5291de47-c1af-4d62-a169-8ca6cda83c8d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.362869] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046b95f4-6af1-443d-825b-34d1a5b4031a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.371663] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94099f7f-2a06-49f5-b70e-f9a602389471 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.388328] env[62820]: DEBUG nova.compute.provider_tree [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.508307] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52480cc1-bade-5573-9414-d3202de29622, 'name': SearchDatastore_Task, 'duration_secs': 0.020739} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.509126] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de2d342d-25e1-4894-9bb8-d51193d9282e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.515329] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1694.515329] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e5c37b-90f4-c7f2-b5ef-88052947f84b" [ 1694.515329] env[62820]: _type = "Task" [ 1694.515329] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.523898] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e5c37b-90f4-c7f2-b5ef-88052947f84b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.577824] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696087, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.891646] env[62820]: DEBUG nova.scheduler.client.report [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1695.002214] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f68e2-594e-cbfb-3ccd-060918d5a9e7/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1695.002797] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee2bd7c-aec4-423a-8755-55e94be3fd2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.010558] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f68e2-594e-cbfb-3ccd-060918d5a9e7/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1695.010558] env[62820]: ERROR oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f68e2-594e-cbfb-3ccd-060918d5a9e7/disk-0.vmdk due to incomplete transfer. 
[ 1695.010737] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-55a41f68-55a5-4d31-b56c-a6125252ef0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.021099] env[62820]: DEBUG oslo_vmware.rw_handles [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f68e2-594e-cbfb-3ccd-060918d5a9e7/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1695.021099] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Uploaded image 1ad372de-b4a3-441d-b9c8-61354d703fed to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1695.023438] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1695.024059] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-90772f6a-9aeb-417a-8c77-53b93dfa3450 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.029957] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e5c37b-90f4-c7f2-b5ef-88052947f84b, 'name': SearchDatastore_Task, 'duration_secs': 0.029512} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.031359] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.031625] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 76bd4a09-300d-460e-8442-21b4f6567698/76bd4a09-300d-460e-8442-21b4f6567698.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1695.032370] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1695.032370] env[62820]: value = "task-1696088" [ 1695.032370] env[62820]: _type = "Task" [ 1695.032370] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.032370] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbc01585-95cd-4df2-9648-b5585351eb70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.044196] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696088, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.045710] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1695.045710] env[62820]: value = "task-1696089" [ 1695.045710] env[62820]: _type = "Task" [ 1695.045710] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.056037] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696089, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.079066] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696087, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.397894] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.403518] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.384s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.406707] env[62820]: INFO nova.compute.claims [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.435142] env[62820]: INFO nova.scheduler.client.report [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Deleted allocations for instance 8a105764-ebd9-4c0a-b555-c5fd5ea8684d [ 1695.547164] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696088, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.557622] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696089, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.580046] env[62820]: DEBUG oslo_vmware.api [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696087, 'name': PowerOnVM_Task, 'duration_secs': 1.120651} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.580369] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1695.580586] env[62820]: INFO nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Took 10.58 seconds to spawn the instance on the hypervisor. 
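The 'Lock "compute_resources" acquired ... waited' / '"released" ... held' pairs above are emitted by oslo.concurrency's lock wrapper, which times how long a caller waited for and then held a named lock. A minimal sketch of the same usage pattern (a stand-in body, not the resource tracker's actual claim logic), assuming only the public oslo_concurrency.lockutils decorator:

import time
from oslo_concurrency import lockutils

# Serializes callers on the named lock; the wrapper logs the
# "acquired ... waited" and "released ... held" lines seen above.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    time.sleep(0.1)  # placeholder for the real claim work
    return instance_uuid

claim_resources('9acf0d8f-2daa-4c3a-9ac0-a1be12e56369')
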
[ 1695.580773] env[62820]: DEBUG nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1695.581640] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8438de79-4c8a-4ff3-a4de-a47ab524c996 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.589594] env[62820]: DEBUG nova.compute.manager [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Received event network-changed-a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1695.589823] env[62820]: DEBUG nova.compute.manager [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Refreshing instance network info cache due to event network-changed-a3b6a7be-3800-4d75-9bf0-003542502fcb. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1695.590018] env[62820]: DEBUG oslo_concurrency.lockutils [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] Acquiring lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.590179] env[62820]: DEBUG oslo_concurrency.lockutils [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] Acquired lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.590352] env[62820]: DEBUG nova.network.neutron [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Refreshing network info cache for port a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1695.662903] env[62820]: DEBUG nova.compute.manager [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Received event network-changed-d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1695.662903] env[62820]: DEBUG nova.compute.manager [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Refreshing instance network info cache due to event network-changed-d46278a6-5202-4c8b-890f-41286051b6d4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1695.663430] env[62820]: DEBUG oslo_concurrency.lockutils [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] Acquiring lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.663430] env[62820]: DEBUG oslo_concurrency.lockutils [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] Acquired lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.663564] env[62820]: DEBUG nova.network.neutron [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Refreshing network info cache for port d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1695.883131] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.883385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.889970] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "46217ada-3fab-4dbc-a65e-a3b8e856918d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.890249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.948148] env[62820]: DEBUG oslo_concurrency.lockutils [None req-135ac0a8-f309-4c61-85c4-e97c2ddfc3f2 tempest-MultipleCreateTestJSON-1191697251 tempest-MultipleCreateTestJSON-1191697251-project-member] Lock "8a105764-ebd9-4c0a-b555-c5fd5ea8684d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.322s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.001075] env[62820]: DEBUG oslo_concurrency.lockutils 
[None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-8753570b-f8cd-4945-9a31-822b01c0c867" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.001075] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-8753570b-f8cd-4945-9a31-822b01c0c867" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.001075] env[62820]: DEBUG nova.objects.instance [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'flavor' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1696.059141] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696089, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580894} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.062448] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 76bd4a09-300d-460e-8442-21b4f6567698/76bd4a09-300d-460e-8442-21b4f6567698.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1696.062707] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1696.063008] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696088, 'name': Destroy_Task, 'duration_secs': 0.6723} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.063289] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aec1692d-df75-4f6a-b771-298fea641d0b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.065382] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Destroyed the VM [ 1696.065672] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1696.066080] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a5c50819-9945-4564-a55e-c77d14e43c44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.075262] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1696.075262] env[62820]: value = "task-1696090" [ 1696.075262] env[62820]: _type = "Task" [ 1696.075262] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.076072] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1696.076072] env[62820]: value = "task-1696091" [ 1696.076072] env[62820]: _type = "Task" [ 1696.076072] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.092092] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696090, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.095732] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696091, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.111230] env[62820]: INFO nova.compute.manager [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Took 26.17 seconds to build instance. 
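The 'Waiting for the task: (returnval){ value = "task-..." }' blocks and the '... progress is N%' entries above reflect a poll-until-done loop: the driver repeatedly reads the vCenter task object until it reaches a terminal state, logging progress along the way. A minimal sketch of that pattern only, with a hypothetical fetch_task_info() helper standing in for the real property-collector call (not oslo.vmware's implementation):

import time

def fetch_task_info(task_id):
    # Hypothetical helper: in the real driver this is a PropertyCollector
    # read returning the task's state, progress and error, e.g.
    # {'state': 'running', 'progress': 66}.
    raise NotImplementedError("supply a real task lookup here")

def wait_for_task(task_id, poll_interval=0.5):
    """Poll a task until it succeeds or errors, logging progress."""
    while True:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
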
[ 1696.371233] env[62820]: DEBUG nova.network.neutron [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updated VIF entry in instance network info cache for port a3b6a7be-3800-4d75-9bf0-003542502fcb. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1696.371659] env[62820]: DEBUG nova.network.neutron [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updating instance_info_cache with network_info: [{"id": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "address": "fa:16:3e:88:f1:00", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b6a7be-38", "ovs_interfaceid": "a3b6a7be-3800-4d75-9bf0-003542502fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.391062] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1696.394135] env[62820]: DEBUG nova.network.neutron [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Updated VIF entry in instance network info cache for port d46278a6-5202-4c8b-890f-41286051b6d4. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1696.394571] env[62820]: DEBUG nova.network.neutron [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Updating instance_info_cache with network_info: [{"id": "d46278a6-5202-4c8b-890f-41286051b6d4", "address": "fa:16:3e:8a:d4:a8", "network": {"id": "00ede8b9-7d74-4e3c-a4ba-c2baa25a2890", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-573826592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "001a6dcf0a33474992d8d7c01bc2022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46278a6-52", "ovs_interfaceid": "d46278a6-5202-4c8b-890f-41286051b6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.397460] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1696.595958] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696090, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078197} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.596822] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1696.598210] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eec974d-e28f-4448-9263-a98495c33442 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.606060] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696091, 'name': RemoveSnapshot_Task, 'duration_secs': 0.466409} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.606807] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1696.607408] env[62820]: DEBUG nova.compute.manager [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1696.608322] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6921f2d9-939f-4348-afa1-d1c7a664b03f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.620491] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e2adc10d-0dcd-4b40-8179-f173bb9b409e tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.694s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.629407] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 76bd4a09-300d-460e-8442-21b4f6567698/76bd4a09-300d-460e-8442-21b4f6567698.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1696.632589] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee4d66a9-6547-453f-a20e-420146d132ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.649374] env[62820]: DEBUG nova.objects.instance [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'pci_requests' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1696.659251] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1696.659251] env[62820]: value = "task-1696092" [ 1696.659251] env[62820]: _type = "Task" [ 1696.659251] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.669767] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696092, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.766406] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edb7f75-ce01-4bd8-baf3-38abfae6a877 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.775777] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49571ec6-bbae-41e7-939c-544755d540c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.810535] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "46434419-d6de-4cc1-905c-14698512b7a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.810535] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.811089] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "46434419-d6de-4cc1-905c-14698512b7a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.811089] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.811280] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.814154] env[62820]: INFO nova.compute.manager [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Terminating instance [ 1696.817275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18b4d3b-a902-43ea-a3da-b570fae57636 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1696.833441] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b656446-4a6d-41b7-8719-3c1beee22c7a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.850094] env[62820]: DEBUG nova.compute.provider_tree [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1696.874418] env[62820]: DEBUG oslo_concurrency.lockutils [req-48594df5-f22f-4437-bb35-a5c7e282f199 req-29fbac6b-6607-4c90-92a1-3a455a922121 service nova] Releasing lock "refresh_cache-03b0abc8-dd32-4cf9-8750-d64b8a66695e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.902430] env[62820]: DEBUG oslo_concurrency.lockutils [req-e3dc8f0e-91f4-4ca7-bf9e-19930224ed0c req-731254da-efa9-4278-98c2-1a1736859a21 service nova] Releasing lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.919452] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.923506] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.153625] env[62820]: DEBUG nova.objects.base [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Object Instance<11843b38-3ce4-42a7-b855-a9d0b473e796> lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1697.153911] env[62820]: DEBUG nova.network.neutron [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1697.159780] env[62820]: INFO nova.compute.manager [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Shelve offloading [ 1697.175384] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696092, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.254794] env[62820]: DEBUG nova.policy [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1697.327877] env[62820]: DEBUG nova.compute.manager [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1697.328256] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1697.329236] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9028519c-39c9-4926-8f59-e5b86316785c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.341543] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1697.341968] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71589037-bc86-4e4b-8c80-96b1cededd15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.350818] env[62820]: DEBUG oslo_vmware.api [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1697.350818] env[62820]: value = "task-1696093" [ 1697.350818] env[62820]: _type = "Task" [ 1697.350818] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.356562] env[62820]: DEBUG nova.scheduler.client.report [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1697.368212] env[62820]: DEBUG oslo_vmware.api [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696093, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.668726] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1697.669519] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebaffdfc-6c9b-4819-bec9-10f646988832 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.675786] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696092, 'name': ReconfigVM_Task, 'duration_secs': 0.739201} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.677279] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 76bd4a09-300d-460e-8442-21b4f6567698/76bd4a09-300d-460e-8442-21b4f6567698.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1697.678085] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1697.678085] env[62820]: value = "task-1696094" [ 1697.678085] env[62820]: _type = "Task" [ 1697.678085] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.678328] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b01d38f-6af6-4d66-bd05-f0933336eaf3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.691591] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1697.692338] env[62820]: DEBUG nova.compute.manager [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1697.692926] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1697.692926] env[62820]: value = "task-1696095" [ 1697.692926] env[62820]: _type = "Task" [ 1697.692926] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.693769] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a553c3a-1d62-49e5-972d-b736cebffb09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.704315] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.704510] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.704940] env[62820]: DEBUG nova.network.neutron [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1697.709336] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696095, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.865107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.865680] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1697.868357] env[62820]: DEBUG oslo_vmware.api [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696093, 'name': PowerOffVM_Task, 'duration_secs': 0.367455} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.868895] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.950s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.870305] env[62820]: INFO nova.compute.claims [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1697.872672] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1697.872852] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1697.873316] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48be7add-aff3-4faf-8eb4-17852231ca7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.970381] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1697.970544] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 
tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1697.971295] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleting the datastore file [datastore1] 46434419-d6de-4cc1-905c-14698512b7a5 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1697.971295] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecc78cd7-d9d5-4d7c-9359-1667425c4b7c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.983476] env[62820]: DEBUG oslo_vmware.api [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for the task: (returnval){ [ 1697.983476] env[62820]: value = "task-1696097" [ 1697.983476] env[62820]: _type = "Task" [ 1697.983476] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.997208] env[62820]: DEBUG oslo_vmware.api [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.207450] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696095, 'name': Rename_Task, 'duration_secs': 0.220763} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.207686] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1698.208270] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c60f9598-2941-43f8-9986-f0e4b9fbeb88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.215869] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1698.215869] env[62820]: value = "task-1696098" [ 1698.215869] env[62820]: _type = "Task" [ 1698.215869] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.224424] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696098, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.370403] env[62820]: DEBUG nova.compute.utils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1698.371750] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1698.371923] env[62820]: DEBUG nova.network.neutron [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1698.491393] env[62820]: DEBUG nova.policy [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1698.496684] env[62820]: DEBUG oslo_vmware.api [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Task: {'id': task-1696097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195834} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.496867] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1698.497068] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1698.497254] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1698.497425] env[62820]: INFO nova.compute.manager [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1698.497666] env[62820]: DEBUG oslo.service.loopingcall [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1698.497857] env[62820]: DEBUG nova.compute.manager [-] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1698.497950] env[62820]: DEBUG nova.network.neutron [-] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1698.640334] env[62820]: DEBUG nova.network.neutron [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.728667] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696098, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.875347] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1699.000861] env[62820]: DEBUG nova.network.neutron [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Successfully created port: 34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1699.145707] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.192035] env[62820]: DEBUG nova.compute.manager [req-4e2ce62b-8bd0-46f6-af3f-1e9eacc34b30 req-496fa209-911b-4f45-88c5-e6a8adf191c9 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Received event network-vif-deleted-b36fcffd-baf9-4baa-a860-018d98ea5451 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1699.193750] env[62820]: INFO nova.compute.manager [req-4e2ce62b-8bd0-46f6-af3f-1e9eacc34b30 req-496fa209-911b-4f45-88c5-e6a8adf191c9 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Neutron deleted interface b36fcffd-baf9-4baa-a860-018d98ea5451; detaching it from the instance and deleting it from the info cache [ 1699.193750] env[62820]: DEBUG nova.network.neutron [req-4e2ce62b-8bd0-46f6-af3f-1e9eacc34b30 req-496fa209-911b-4f45-88c5-e6a8adf191c9 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.218173] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae909b4d-2343-4379-bc99-e71b2c5b4056 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.240428] env[62820]: DEBUG oslo_vmware.api [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696098, 'name': PowerOnVM_Task, 'duration_secs': 0.786054} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.242611] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1699.242829] env[62820]: INFO nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Took 9.80 seconds to spawn the instance on the hypervisor. 
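The PowerOnVM_Task sequence above (progress polling, "completed successfully", then "Powered on the VM") is oslo.vmware's invoke-then-wait pattern. A minimal sketch assuming a reachable vCenter; the credentials and the vm_ref placeholder are illustrative, not values from this log:

from oslo_vmware import api

# Hypothetical connection details; real deployments take these from nova.conf.
session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=3, task_poll_interval=0.5)

vm_ref = None  # stand-in for the VM's managed object reference, looked up elsewhere
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)  # polls the task until it succeeds or raises on error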
[ 1699.243089] env[62820]: DEBUG nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1699.243877] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda84de4-22ac-461e-b3e1-816f51505019 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.247932] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da0ca5a-db66-4047-827f-112aaa757857 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.292181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef79b20-b8d6-45f6-adc1-ce1dd196d720 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.300801] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e521c2-0495-419c-b1e7-fdacc562b65c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.316393] env[62820]: DEBUG nova.compute.provider_tree [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.482661] env[62820]: DEBUG nova.network.neutron [-] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.545206] env[62820]: DEBUG nova.network.neutron [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Successfully updated port: 8753570b-f8cd-4945-9a31-822b01c0c867 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1699.573627] env[62820]: DEBUG nova.compute.manager [req-96f27896-82b9-4b12-b9ed-af901e2556b4 req-504c6311-a126-439a-ba37-85fd5b512be7 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-vif-plugged-8753570b-f8cd-4945-9a31-822b01c0c867 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1699.573627] env[62820]: DEBUG oslo_concurrency.lockutils [req-96f27896-82b9-4b12-b9ed-af901e2556b4 req-504c6311-a126-439a-ba37-85fd5b512be7 service nova] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.573831] env[62820]: DEBUG oslo_concurrency.lockutils [req-96f27896-82b9-4b12-b9ed-af901e2556b4 req-504c6311-a126-439a-ba37-85fd5b512be7 service nova] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.574128] env[62820]: DEBUG oslo_concurrency.lockutils [req-96f27896-82b9-4b12-b9ed-af901e2556b4 req-504c6311-a126-439a-ba37-85fd5b512be7 service nova] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.574336] env[62820]: DEBUG nova.compute.manager [req-96f27896-82b9-4b12-b9ed-af901e2556b4 req-504c6311-a126-439a-ba37-85fd5b512be7 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] No waiting events found dispatching network-vif-plugged-8753570b-f8cd-4945-9a31-822b01c0c867 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1699.574668] env[62820]: WARNING nova.compute.manager [req-96f27896-82b9-4b12-b9ed-af901e2556b4 req-504c6311-a126-439a-ba37-85fd5b512be7 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received unexpected event network-vif-plugged-8753570b-f8cd-4945-9a31-822b01c0c867 for instance with vm_state active and task_state None. [ 1699.621163] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1699.622379] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0b9e0f-7be9-41df-8900-2251684f5a00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.632411] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1699.632755] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b9aa478-e16d-42a1-bf2b-c7475c51d5eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.698487] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6be368e1-b219-4a10-8abd-b0902a28a01d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.711011] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1f323a-5f53-4e51-9242-596c94da393c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.722631] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1699.722835] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] 
[instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1699.723033] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleting the datastore file [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1699.723793] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c84b579-06e1-4da6-bade-34b9e71581fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.736144] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1699.736144] env[62820]: value = "task-1696100" [ 1699.736144] env[62820]: _type = "Task" [ 1699.736144] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.755422] env[62820]: DEBUG nova.compute.manager [req-4e2ce62b-8bd0-46f6-af3f-1e9eacc34b30 req-496fa209-911b-4f45-88c5-e6a8adf191c9 service nova] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Detach interface failed, port_id=b36fcffd-baf9-4baa-a860-018d98ea5451, reason: Instance 46434419-d6de-4cc1-905c-14698512b7a5 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1699.759053] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696100, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.798938] env[62820]: INFO nova.compute.manager [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Took 24.71 seconds to build instance. [ 1699.820293] env[62820]: DEBUG nova.scheduler.client.report [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1699.892665] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Start spawning the instance on the hypervisor. 
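The scheduler report entry above includes the resource-provider inventory. Placement derives usable capacity per resource class roughly as (total - reserved) * allocation_ratio (min_unit, max_unit and step_size constraints aside); a small self-contained check using the logged numbers:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Usable capacity per resource class from the logged inventory fields.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(capacity(inventory))   # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}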
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1699.919902] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1699.920248] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1699.920460] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1699.920712] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1699.920924] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1699.921158] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1699.921441] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1699.921656] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1699.921884] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1699.922120] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1699.922360] env[62820]: DEBUG nova.virt.hardware [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1699.923615] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91fbaf1-f3e8-462a-909d-c9582e2c1e9a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.938482] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd061-f5c3-caf8-2520-57c9ae9d3879/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1699.939987] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc51faba-22e2-47dc-87fb-6e82acc9930e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.945075] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add774da-81fe-4053-97d9-96c0dde7c799 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.963078] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd061-f5c3-caf8-2520-57c9ae9d3879/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1699.963287] env[62820]: ERROR oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd061-f5c3-caf8-2520-57c9ae9d3879/disk-0.vmdk due to incomplete transfer. [ 1699.963557] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99ddf40d-fdb5-40c3-b8dc-212d49e85de6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.973521] env[62820]: DEBUG oslo_vmware.rw_handles [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd061-f5c3-caf8-2520-57c9ae9d3879/disk-0.vmdk. 
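The nova.virt.hardware entries earlier in this block go from the flavor/image limits to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for the 1-vCPU m1.nano flavor. A simplified, self-contained illustration of that enumeration step (not Nova's actual implementation):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate all (sockets, cores, threads) factorizations within the limits.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- matches the logged result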
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1699.973904] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Uploaded image d1f53c66-1541-4919-9985-f1fa793ea874 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1699.976681] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1699.976681] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-60d14231-a7e8-4f8c-b5a3-d042cf0222cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.984721] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1699.984721] env[62820]: value = "task-1696101" [ 1699.984721] env[62820]: _type = "Task" [ 1699.984721] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.988122] env[62820]: INFO nova.compute.manager [-] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Took 1.49 seconds to deallocate network for instance. [ 1699.997226] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696101, 'name': Destroy_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.047545] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.047690] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.047879] env[62820]: DEBUG nova.network.neutron [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1700.248647] env[62820]: DEBUG oslo_vmware.api [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16255} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.248647] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1700.248878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1700.249016] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1700.272281] env[62820]: INFO nova.scheduler.client.report [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted allocations for instance e420644c-cfcc-4f8c-ae03-c9ebef585690 [ 1700.302682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d3025a3d-8595-4522-ae8f-fd8e5777b284 tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "76bd4a09-300d-460e-8442-21b4f6567698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.220s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.332038] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.332436] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1700.335106] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.412s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.336810] env[62820]: INFO nova.compute.claims [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1700.495167] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696101, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.499277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.605611] env[62820]: WARNING nova.network.neutron [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] 26851e2e-dece-4dce-bec8-e64227003b80 already exists in list: networks containing: ['26851e2e-dece-4dce-bec8-e64227003b80']. ignoring it [ 1700.605849] env[62820]: WARNING nova.network.neutron [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] 26851e2e-dece-4dce-bec8-e64227003b80 already exists in list: networks containing: ['26851e2e-dece-4dce-bec8-e64227003b80']. 
ignoring it [ 1700.669234] env[62820]: DEBUG nova.compute.manager [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1700.777638] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.787595] env[62820]: DEBUG nova.network.neutron [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Successfully updated port: 34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1700.843804] env[62820]: DEBUG nova.compute.utils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1700.849032] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1700.849032] env[62820]: DEBUG nova.network.neutron [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1700.925864] env[62820]: DEBUG nova.policy [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81a169ac4144a5bbc0a4e3a077cb4a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65abf73e789b48d3ba24e2660d7c0341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1700.999436] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696101, 'name': Destroy_Task} progress is 100%. 
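The "compute_resources" acquire/held/released messages above come from oslo.concurrency's synchronized decorator guarding the resource tracker's claims and usage updates. A minimal sketch of that locking pattern; the usage bookkeeping is a stand-in, not Nova's code:

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'   # same lock name as in the log

@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def instance_claim(usage, instance):
    # Runs with the in-process lock held, so concurrent claims cannot race.
    usage['vcpus_used'] += instance['vcpus']
    usage['memory_mb_used'] += instance['memory_mb']

usage = {'vcpus_used': 0, 'memory_mb_used': 0}
instance_claim(usage, {'vcpus': 1, 'memory_mb': 192})
print(usage)   # {'vcpus_used': 1, 'memory_mb_used': 192}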
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.202700] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.292363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.292523] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.292683] env[62820]: DEBUG nova.network.neutron [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1701.305209] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-vif-unplugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1701.305427] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.305635] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.305774] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.305925] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] No waiting events found dispatching network-vif-unplugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1701.306100] env[62820]: WARNING nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received unexpected event network-vif-unplugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 for instance with vm_state shelved_offloaded and task_state None. [ 1701.306401] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-changed-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1701.306470] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Refreshing instance network info cache due to event network-changed-8c81ac6d-fc1a-4519-81f6-1a3a523acee9. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1701.306639] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.306772] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.306946] env[62820]: DEBUG nova.network.neutron [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Refreshing network info cache for port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1701.325424] env[62820]: DEBUG nova.network.neutron [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": 
"e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f0084819-f55d-4bd8-a480-72eab0bdd647", "address": "fa:16:3e:ff:4b:7d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0084819-f5", "ovs_interfaceid": "f0084819-f55d-4bd8-a480-72eab0bdd647", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8753570b-f8cd-4945-9a31-822b01c0c867", "address": "fa:16:3e:9d:1a:02", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8753570b-f8", "ovs_interfaceid": "8753570b-f8cd-4945-9a31-822b01c0c867", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.351052] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1701.389449] env[62820]: DEBUG nova.network.neutron [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Successfully created port: af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1701.501623] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696101, 'name': Destroy_Task, 'duration_secs': 1.025946} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.501919] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroyed the VM [ 1701.502171] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1701.502698] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5012ccca-5e67-4788-8ac5-50c6d59c502e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.515861] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1701.515861] env[62820]: value = "task-1696102" [ 1701.515861] env[62820]: _type = "Task" [ 1701.515861] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.530825] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696102, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.651773] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85d52d6-c519-44eb-98fa-8d00f82b9cf4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.662641] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a6920c-83f0-4a43-a25a-e48cd580ce87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.697399] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997dc8c1-bb6d-4a0a-a377-3b99500ea1d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.706466] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33be46f7-0b8b-49f3-ae77-9c087ea0ff8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.721232] env[62820]: DEBUG nova.compute.provider_tree [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.733309] env[62820]: DEBUG nova.compute.manager [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-changed-8753570b-f8cd-4945-9a31-822b01c0c867 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1701.733522] env[62820]: DEBUG nova.compute.manager [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing instance network info cache due to event network-changed-8753570b-f8cd-4945-9a31-822b01c0c867. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1701.733720] env[62820]: DEBUG oslo_concurrency.lockutils [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.824955] env[62820]: DEBUG nova.network.neutron [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1701.827537] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.828397] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.828514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.828895] env[62820]: DEBUG oslo_concurrency.lockutils [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.829201] env[62820]: DEBUG nova.network.neutron [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Refreshing network info cache for port 8753570b-f8cd-4945-9a31-822b01c0c867 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1701.834478] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309fd77b-5ea4-4c73-8fb8-61f672bf91d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.868458] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.868740] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.868929] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.869201] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.869335] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.869467] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.869726] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.869920] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.870108] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.870297] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.870503] env[62820]: DEBUG nova.virt.hardware [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.876847] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfiguring VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1701.880429] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8c5c1ba-f381-4a0b-ba63-45e4230e82cf {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.899989] env[62820]: DEBUG oslo_vmware.api [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1701.899989] env[62820]: value = "task-1696103" [ 1701.899989] env[62820]: _type = "Task" [ 1701.899989] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.910017] env[62820]: DEBUG oslo_vmware.api [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696103, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.018380] env[62820]: DEBUG nova.network.neutron [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Updating instance_info_cache with network_info: [{"id": "34e8df14-7464-4d07-81cb-333b36342136", "address": "fa:16:3e:91:36:f9", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e8df14-74", "ovs_interfaceid": "34e8df14-7464-4d07-81cb-333b36342136", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.027424] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696102, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.066156] env[62820]: DEBUG nova.network.neutron [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updated VIF entry in instance network info cache for port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1702.066634] env[62820]: DEBUG nova.network.neutron [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": null, "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.224789] env[62820]: DEBUG nova.scheduler.client.report [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1702.381830] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1702.419478] env[62820]: DEBUG oslo_vmware.api [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696103, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.422697] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1702.422976] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1702.423196] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1702.423449] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1702.423658] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1702.423848] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1702.424106] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1702.424313] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1702.424525] env[62820]: DEBUG nova.virt.hardware [None 
req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1702.424728] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1702.424944] env[62820]: DEBUG nova.virt.hardware [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1702.425808] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218cfd68-8aeb-4ca2-8319-921e8160270d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.434817] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef8665b-e0e4-4115-9d50-9a56a81c1336 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.522803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.523194] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Instance network_info: |[{"id": "34e8df14-7464-4d07-81cb-333b36342136", "address": "fa:16:3e:91:36:f9", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e8df14-74", "ovs_interfaceid": "34e8df14-7464-4d07-81cb-333b36342136", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1702.523701] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:36:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34e8df14-7464-4d07-81cb-333b36342136', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1702.534049] env[62820]: DEBUG oslo.service.loopingcall [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.537482] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1702.541116] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7423c6dc-b819-42e1-85a9-61686ade9e3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.556677] env[62820]: DEBUG oslo_vmware.api [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696102, 'name': RemoveSnapshot_Task, 'duration_secs': 0.873744} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.556978] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1702.557227] env[62820]: INFO nova.compute.manager [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 17.62 seconds to snapshot the instance on the hypervisor. [ 1702.561686] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.568732] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1702.568732] env[62820]: value = "task-1696104" [ 1702.568732] env[62820]: _type = "Task" [ 1702.568732] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.569303] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.569514] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Received event network-vif-plugged-34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1702.569703] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Acquiring lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.569900] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.570073] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.570245] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] No waiting events found dispatching network-vif-plugged-34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1702.570424] env[62820]: WARNING nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Received unexpected event network-vif-plugged-34e8df14-7464-4d07-81cb-333b36342136 for instance with vm_state building and task_state spawning. [ 1702.570596] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Received event network-changed-34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1702.570765] env[62820]: DEBUG nova.compute.manager [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Refreshing instance network info cache due to event network-changed-34e8df14-7464-4d07-81cb-333b36342136. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1702.570938] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Acquiring lock "refresh_cache-9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.571095] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Acquired lock "refresh_cache-9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.571259] env[62820]: DEBUG nova.network.neutron [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Refreshing network info cache for port 34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1702.582691] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696104, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.713410] env[62820]: DEBUG nova.network.neutron [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updated VIF entry in instance network info cache for port 8753570b-f8cd-4945-9a31-822b01c0c867. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1702.713901] env[62820]: DEBUG nova.network.neutron [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f0084819-f55d-4bd8-a480-72eab0bdd647", "address": "fa:16:3e:ff:4b:7d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0084819-f5", "ovs_interfaceid": "f0084819-f55d-4bd8-a480-72eab0bdd647", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8753570b-f8cd-4945-9a31-822b01c0c867", "address": "fa:16:3e:9d:1a:02", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8753570b-f8", "ovs_interfaceid": "8753570b-f8cd-4945-9a31-822b01c0c867", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.729222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.731937] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1702.732319] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.233s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.732553] env[62820]: DEBUG nova.objects.instance [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lazy-loading 'resources' on Instance uuid 46434419-d6de-4cc1-905c-14698512b7a5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.911771] env[62820]: DEBUG oslo_vmware.api [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696103, 'name': ReconfigVM_Task, 'duration_secs': 0.676033} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.912421] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.912654] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfigured VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1702.989464] env[62820]: DEBUG nova.network.neutron [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Successfully updated port: af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1703.080960] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696104, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.119875] env[62820]: DEBUG nova.compute.manager [None req-d1e0cb84-b1bf-48d3-a722-b495fd607aba tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Found 2 images (rotation: 2) {{(pid=62820) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4898}} [ 1703.219346] env[62820]: DEBUG oslo_concurrency.lockutils [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.219590] env[62820]: DEBUG nova.compute.manager [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Received event network-changed-d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1703.219763] env[62820]: DEBUG nova.compute.manager [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Refreshing instance network info cache due to event network-changed-d46278a6-5202-4c8b-890f-41286051b6d4. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1703.219966] env[62820]: DEBUG oslo_concurrency.lockutils [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] Acquiring lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.221237] env[62820]: DEBUG oslo_concurrency.lockutils [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] Acquired lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.221300] env[62820]: DEBUG nova.network.neutron [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Refreshing network info cache for port d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1703.235420] env[62820]: DEBUG nova.compute.utils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1703.239847] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1703.239847] env[62820]: DEBUG nova.network.neutron [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1703.293416] env[62820]: DEBUG nova.network.neutron [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Updated VIF entry in instance network info cache for port 34e8df14-7464-4d07-81cb-333b36342136. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.293835] env[62820]: DEBUG nova.network.neutron [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Updating instance_info_cache with network_info: [{"id": "34e8df14-7464-4d07-81cb-333b36342136", "address": "fa:16:3e:91:36:f9", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e8df14-74", "ovs_interfaceid": "34e8df14-7464-4d07-81cb-333b36342136", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.306723] env[62820]: DEBUG nova.policy [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '285983ab4e974ef1b7fd7b433c0f12e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2b10e16241b4d2ab29b164d08e0653d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1703.417460] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ebe9e4d4-b7a5-4cb3-b62e-fd441e62c934 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-8753570b-f8cd-4945-9a31-822b01c0c867" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 7.418s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.492588] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.492740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.493032] env[62820]: DEBUG nova.network.neutron [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.536176] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af47e47a-f03a-4893-bac6-44e03ed6dc71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.543856] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b327e692-a004-4d8b-815a-eac2b9179fa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.587879] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2118db9-38c9-4652-b576-35eafdc75ee0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.601496] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794921ee-8f84-4d69-8292-360e45484375 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.609019] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696104, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.616045] env[62820]: DEBUG nova.compute.provider_tree [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.697951] env[62820]: DEBUG nova.network.neutron [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Successfully created port: b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1703.711685] env[62820]: DEBUG nova.compute.manager [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1703.712620] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d45239-4c46-4ddf-aab9-22fdfb57b474 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.725869] env[62820]: DEBUG nova.compute.manager [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Received event network-vif-plugged-af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1703.726120] env[62820]: DEBUG oslo_concurrency.lockutils [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] Acquiring lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.726554] env[62820]: DEBUG oslo_concurrency.lockutils [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.726554] env[62820]: DEBUG oslo_concurrency.lockutils [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.726737] env[62820]: DEBUG nova.compute.manager [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] No waiting events found dispatching network-vif-plugged-af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1703.726949] env[62820]: WARNING nova.compute.manager 
[req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Received unexpected event network-vif-plugged-af0b710a-f322-429b-9530-abac794409f9 for instance with vm_state building and task_state spawning. [ 1703.727223] env[62820]: DEBUG nova.compute.manager [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Received event network-changed-af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1703.727402] env[62820]: DEBUG nova.compute.manager [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Refreshing instance network info cache due to event network-changed-af0b710a-f322-429b-9530-abac794409f9. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1703.727635] env[62820]: DEBUG oslo_concurrency.lockutils [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] Acquiring lock "refresh_cache-4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.743612] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1703.796740] env[62820]: DEBUG oslo_concurrency.lockutils [req-b55801f2-b29f-4110-a3d8-39e3ec2a5e7b req-561b063c-08c0-4de2-83a0-db2ca126179f service nova] Releasing lock "refresh_cache-9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.961632] env[62820]: DEBUG nova.network.neutron [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Updated VIF entry in instance network info cache for port d46278a6-5202-4c8b-890f-41286051b6d4. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.962026] env[62820]: DEBUG nova.network.neutron [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Updating instance_info_cache with network_info: [{"id": "d46278a6-5202-4c8b-890f-41286051b6d4", "address": "fa:16:3e:8a:d4:a8", "network": {"id": "00ede8b9-7d74-4e3c-a4ba-c2baa25a2890", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-573826592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "001a6dcf0a33474992d8d7c01bc2022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46278a6-52", "ovs_interfaceid": "d46278a6-5202-4c8b-890f-41286051b6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.028846] env[62820]: DEBUG nova.network.neutron [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1704.095179] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696104, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.119257] env[62820]: DEBUG nova.scheduler.client.report [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1704.229484] env[62820]: INFO nova.compute.manager [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] instance snapshotting [ 1704.230110] env[62820]: DEBUG nova.objects.instance [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'flavor' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.241131] env[62820]: DEBUG nova.network.neutron [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Updating instance_info_cache with network_info: [{"id": "af0b710a-f322-429b-9530-abac794409f9", "address": "fa:16:3e:e6:8f:54", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf0b710a-f3", "ovs_interfaceid": "af0b710a-f322-429b-9530-abac794409f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.465692] env[62820]: DEBUG oslo_concurrency.lockutils [req-da063943-6235-408a-9e5a-b0228de819e4 req-5c76493e-9597-40e7-87e0-ad15a5959d95 service nova] Releasing lock "refresh_cache-76bd4a09-300d-460e-8442-21b4f6567698" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.595172] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696104, 'name': CreateVM_Task, 'duration_secs': 1.589309} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.595363] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1704.596074] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.596249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.596567] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1704.596820] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d3e4194-8a65-4134-a5a4-723f6a415264 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.601474] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1704.601474] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5292f42f-e133-4b4c-4f4b-6dfb75d2a468" [ 1704.601474] env[62820]: _type = "Task" [ 1704.601474] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.609166] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5292f42f-e133-4b4c-4f4b-6dfb75d2a468, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.629705] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.631596] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.854s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.631855] env[62820]: DEBUG nova.objects.instance [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'resources' on Instance uuid e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.653503] env[62820]: INFO nova.scheduler.client.report [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Deleted allocations for instance 46434419-d6de-4cc1-905c-14698512b7a5 [ 1704.738898] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640f4541-7cde-46a8-9d02-845820660324 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.742305] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.742614] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Instance network_info: |[{"id": "af0b710a-f322-429b-9530-abac794409f9", "address": "fa:16:3e:e6:8f:54", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf0b710a-f3", "ovs_interfaceid": "af0b710a-f322-429b-9530-abac794409f9", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1704.742900] env[62820]: DEBUG oslo_concurrency.lockutils [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] Acquired lock "refresh_cache-4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.743093] env[62820]: DEBUG nova.network.neutron [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Refreshing network info cache for port af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1704.744285] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:8f:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af0b710a-f322-429b-9530-abac794409f9', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1704.751548] env[62820]: DEBUG oslo.service.loopingcall [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.755588] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1704.756603] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1704.771725] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-478ba3ae-5c10-44a7-8f18-216fdd926ee8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.788459] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c2f991-6652-4103-a243-28996f05383d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.796653] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1704.797853] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1704.797853] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1704.797853] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1704.797853] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1704.797853] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1704.798155] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 
tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1704.798155] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1704.798286] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1704.798474] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1704.798637] env[62820]: DEBUG nova.virt.hardware [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1704.799465] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27f0cb0-b4eb-48ee-8e67-80f9cc9e4361 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.806627] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1704.806627] env[62820]: value = "task-1696105" [ 1704.806627] env[62820]: _type = "Task" [ 1704.806627] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.813082] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f4c675-d201-4550-b95c-ea935f7c4d70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.821938] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696105, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.973881] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-f0084819-f55d-4bd8-a480-72eab0bdd647" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.973881] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-f0084819-f55d-4bd8-a480-72eab0bdd647" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.075972] env[62820]: DEBUG nova.network.neutron [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Updated VIF entry in instance network info cache for port af0b710a-f322-429b-9530-abac794409f9. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1705.076412] env[62820]: DEBUG nova.network.neutron [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Updating instance_info_cache with network_info: [{"id": "af0b710a-f322-429b-9530-abac794409f9", "address": "fa:16:3e:e6:8f:54", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf0b710a-f3", "ovs_interfaceid": "af0b710a-f322-429b-9530-abac794409f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.112710] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5292f42f-e133-4b4c-4f4b-6dfb75d2a468, 'name': SearchDatastore_Task, 'duration_secs': 0.011564} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.113052] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.113299] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1705.113595] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.113779] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.113939] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1705.114220] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeac13b2-3dd9-4238-9aee-5ccdd880b989 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.123118] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1705.123328] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1705.123912] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00fb1889-fcf4-4c1a-9c1c-4655c2f5b3f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.129580] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1705.129580] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5204577b-68b0-a7e1-dbb0-91e0faa74a00" [ 1705.129580] env[62820]: _type = "Task" [ 1705.129580] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.134090] env[62820]: DEBUG nova.objects.instance [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'numa_topology' on Instance uuid e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1705.138491] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5204577b-68b0-a7e1-dbb0-91e0faa74a00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.160943] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c54bdef8-17ca-4ce8-b378-45a0521a0410 tempest-ListServerFiltersTestJSON-1615622352 tempest-ListServerFiltersTestJSON-1615622352-project-member] Lock "46434419-d6de-4cc1-905c-14698512b7a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.350s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.251147] env[62820]: DEBUG nova.compute.manager [req-46d7cabf-921f-4f85-aab7-c6f882daa20b req-f98d0f02-bb94-47d9-b35d-f03e898027ff service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Received event network-vif-plugged-b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1705.251647] env[62820]: DEBUG oslo_concurrency.lockutils [req-46d7cabf-921f-4f85-aab7-c6f882daa20b req-f98d0f02-bb94-47d9-b35d-f03e898027ff service nova] Acquiring lock "46217ada-3fab-4dbc-a65e-a3b8e856918d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.251828] env[62820]: DEBUG oslo_concurrency.lockutils [req-46d7cabf-921f-4f85-aab7-c6f882daa20b req-f98d0f02-bb94-47d9-b35d-f03e898027ff service nova] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.251960] env[62820]: DEBUG oslo_concurrency.lockutils [req-46d7cabf-921f-4f85-aab7-c6f882daa20b req-f98d0f02-bb94-47d9-b35d-f03e898027ff service nova] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.252149] env[62820]: DEBUG nova.compute.manager [req-46d7cabf-921f-4f85-aab7-c6f882daa20b req-f98d0f02-bb94-47d9-b35d-f03e898027ff service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] No waiting events found dispatching network-vif-plugged-b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1705.252319] env[62820]: WARNING nova.compute.manager [req-46d7cabf-921f-4f85-aab7-c6f882daa20b req-f98d0f02-bb94-47d9-b35d-f03e898027ff service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Received unexpected event network-vif-plugged-b2ce5106-4423-4570-ac4f-58a8fef758e7 for instance with vm_state building and task_state spawning. [ 1705.312589] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1705.312874] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d99712eb-d732-4a87-8537-a6e62e769646 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.320943] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696105, 'name': CreateVM_Task, 'duration_secs': 0.430222} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.322188] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1705.322466] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1705.322466] env[62820]: value = "task-1696106" [ 1705.322466] env[62820]: _type = "Task" [ 1705.322466] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.323115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.323281] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.323777] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1705.323875] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eea23bb-3bc1-4ee6-8750-56a1c376652f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.334125] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696106, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.334607] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1705.334607] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b6e58a-af11-c082-15e1-57aec3ceaf93" [ 1705.334607] env[62820]: _type = "Task" [ 1705.334607] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.342102] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b6e58a-af11-c082-15e1-57aec3ceaf93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.380782] env[62820]: DEBUG nova.network.neutron [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Successfully updated port: b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1705.478798] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.478951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.479924] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9642bce-4b06-498c-826c-384272a936d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.499886] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02ee509-e93f-4896-b74e-9973efb71820 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.529453] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfiguring VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1705.530099] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e2e43b-5c20-4b31-871b-537d25b089b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.550392] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1705.550392] env[62820]: value = "task-1696107" [ 1705.550392] env[62820]: _type = "Task" [ 1705.550392] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.557666] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.579459] env[62820]: DEBUG oslo_concurrency.lockutils [req-436167a5-c519-4677-8197-ad5566b06cd9 req-b774fe55-dc43-4f21-b3ca-05817e1b2599 service nova] Releasing lock "refresh_cache-4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.639461] env[62820]: DEBUG nova.objects.base [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1705.641862] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5204577b-68b0-a7e1-dbb0-91e0faa74a00, 'name': SearchDatastore_Task, 'duration_secs': 0.010231} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.643101] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abc6d063-56a9-4a41-b4ec-8eb06842c8fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.648311] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1705.648311] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526c3a1d-ffbe-3cd9-f3f7-9746707f6f37" [ 1705.648311] env[62820]: _type = "Task" [ 1705.648311] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.657413] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526c3a1d-ffbe-3cd9-f3f7-9746707f6f37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.835862] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696106, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.845898] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b6e58a-af11-c082-15e1-57aec3ceaf93, 'name': SearchDatastore_Task, 'duration_secs': 0.010651} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.848675] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.848959] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1705.849180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.883976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "refresh_cache-46217ada-3fab-4dbc-a65e-a3b8e856918d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.884230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquired lock "refresh_cache-46217ada-3fab-4dbc-a65e-a3b8e856918d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.884395] env[62820]: DEBUG nova.network.neutron [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1705.898725] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9d1325-abfe-49c3-ba8c-2f8fa1f169f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.906834] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f506eb9f-6e31-4aea-9050-b3ccb5ed739c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.938186] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d783fca5-5fd5-4344-b1de-f84406c853ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.946201] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f81cdc-3768-49fd-897f-dcfefaec1c87 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.963753] env[62820]: DEBUG nova.compute.provider_tree [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1706.060029] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.159427] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526c3a1d-ffbe-3cd9-f3f7-9746707f6f37, 'name': SearchDatastore_Task, 'duration_secs': 0.009912} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.159691] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.160319] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369/9acf0d8f-2daa-4c3a-9ac0-a1be12e56369.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1706.160319] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1706.160564] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1706.160609] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c39e0ed-7ca3-4011-9d8e-97a69f0006fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.162478] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-949c15ba-c48a-42a7-814c-31ac6c8a6bb9 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.169528] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1706.169528] env[62820]: value = "task-1696108" [ 1706.169528] env[62820]: _type = "Task" [ 1706.169528] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.173343] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1706.173524] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1706.174685] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb34b6ae-64d0-4a6f-9bde-400d05830664 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.179769] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696108, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.182760] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1706.182760] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52800411-2a41-1af7-18fa-aa9a5245098c" [ 1706.182760] env[62820]: _type = "Task" [ 1706.182760] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.190816] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52800411-2a41-1af7-18fa-aa9a5245098c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.334491] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696106, 'name': CreateSnapshot_Task, 'duration_secs': 0.521039} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.334833] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1706.335704] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1091ddef-6897-475a-8686-5eaf45f98d76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.425554] env[62820]: DEBUG nova.network.neutron [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1706.466911] env[62820]: DEBUG nova.scheduler.client.report [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1706.560541] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.584966] env[62820]: DEBUG nova.network.neutron [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Updating instance_info_cache with network_info: [{"id": "b2ce5106-4423-4570-ac4f-58a8fef758e7", "address": "fa:16:3e:66:04:d0", "network": {"id": "1b14e126-1bde-4cfc-8919-bb2e37a1b9b7", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-323699130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2b10e16241b4d2ab29b164d08e0653d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ce5106-44", "ovs_interfaceid": "b2ce5106-4423-4570-ac4f-58a8fef758e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.679752] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477269} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.681423] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369/9acf0d8f-2daa-4c3a-9ac0-a1be12e56369.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.681423] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.681423] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04ba182c-e34a-4267-aa3c-979a3448e64e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.688261] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1706.688261] env[62820]: value = "task-1696109" [ 1706.688261] env[62820]: _type = "Task" [ 1706.688261] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.695425] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52800411-2a41-1af7-18fa-aa9a5245098c, 'name': SearchDatastore_Task, 'duration_secs': 0.009017} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.696529] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3012e922-1f83-4ccf-bc00-96ff1932ef76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.702938] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696109, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.707613] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1706.707613] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b89ff1-2187-fc1a-5d22-e90c78bc11df" [ 1706.707613] env[62820]: _type = "Task" [ 1706.707613] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.716283] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b89ff1-2187-fc1a-5d22-e90c78bc11df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.855531] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1706.855898] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8733cbcb-d401-4b46-beb4-daeca4385e38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.865607] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1706.865607] env[62820]: value = "task-1696110" [ 1706.865607] env[62820]: _type = "Task" [ 1706.865607] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.874776] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696110, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.976036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.344s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.979725] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.776s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.061180] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.088393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Releasing lock "refresh_cache-46217ada-3fab-4dbc-a65e-a3b8e856918d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.088719] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Instance network_info: |[{"id": "b2ce5106-4423-4570-ac4f-58a8fef758e7", "address": "fa:16:3e:66:04:d0", "network": {"id": "1b14e126-1bde-4cfc-8919-bb2e37a1b9b7", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-323699130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2b10e16241b4d2ab29b164d08e0653d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ce5106-44", "ovs_interfaceid": "b2ce5106-4423-4570-ac4f-58a8fef758e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1707.089188] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:04:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '42f08482-a1da-405d-9918-d733d9f5173c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2ce5106-4423-4570-ac4f-58a8fef758e7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1707.096855] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Creating folder: Project (c2b10e16241b4d2ab29b164d08e0653d). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1707.097208] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6cd8791-c457-44d6-bc72-3da2f5552215 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.109056] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Created folder: Project (c2b10e16241b4d2ab29b164d08e0653d) in parent group-v353379. [ 1707.109056] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Creating folder: Instances. Parent ref: group-v353637. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1707.109056] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f4aebb4-0fab-48fa-b7f0-412e7f738c5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.117580] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Created folder: Instances in parent group-v353637. [ 1707.117821] env[62820]: DEBUG oslo.service.loopingcall [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.118014] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1707.118277] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7892bb2c-78e4-4d01-ba51-d06fa1b10a56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.137689] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1707.137689] env[62820]: value = "task-1696113" [ 1707.137689] env[62820]: _type = "Task" [ 1707.137689] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.149921] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696113, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.198255] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696109, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060444} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.198596] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.199492] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dca3d68-4690-4ab3-b8bb-6b12ec376d6a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.221893] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369/9acf0d8f-2daa-4c3a-9ac0-a1be12e56369.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.225123] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fd902b1-bb51-41d8-bee0-88c10866df20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.247107] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b89ff1-2187-fc1a-5d22-e90c78bc11df, 'name': SearchDatastore_Task, 'duration_secs': 0.008976} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.248562] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.248850] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad/4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1707.249246] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1707.249246] env[62820]: value = "task-1696114" [ 1707.249246] env[62820]: _type = "Task" [ 1707.249246] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.249613] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4fda1c63-a4b2-4ec7-92fd-c47eb2168cc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.260245] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696114, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.261643] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1707.261643] env[62820]: value = "task-1696115" [ 1707.261643] env[62820]: _type = "Task" [ 1707.261643] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.269637] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.279277] env[62820]: DEBUG nova.compute.manager [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Received event network-changed-b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1707.279277] env[62820]: DEBUG nova.compute.manager [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Refreshing instance network info cache due to event network-changed-b2ce5106-4423-4570-ac4f-58a8fef758e7. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1707.279277] env[62820]: DEBUG oslo_concurrency.lockutils [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] Acquiring lock "refresh_cache-46217ada-3fab-4dbc-a65e-a3b8e856918d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.279942] env[62820]: DEBUG oslo_concurrency.lockutils [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] Acquired lock "refresh_cache-46217ada-3fab-4dbc-a65e-a3b8e856918d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.279942] env[62820]: DEBUG nova.network.neutron [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Refreshing network info cache for port b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.377945] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696110, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.485956] env[62820]: INFO nova.compute.claims [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1707.492090] env[62820]: DEBUG oslo_concurrency.lockutils [None req-801eafe9-1888-4042-b9a6-6c6393cdf785 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.192s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.493128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.931s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.493368] env[62820]: INFO nova.compute.manager [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Unshelving [ 1707.562306] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.648206] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696113, 'name': CreateVM_Task, 'duration_secs': 0.418858} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.648800] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1707.649645] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.649842] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.650190] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1707.650506] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72ed04d9-fc9e-4aec-a280-d8c1b8a346ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.655848] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1707.655848] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5279c2e9-b9d2-c3ee-ed42-73ee8933ba95" [ 1707.655848] env[62820]: _type = "Task" [ 1707.655848] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.664704] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5279c2e9-b9d2-c3ee-ed42-73ee8933ba95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.761821] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696114, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.770046] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696115, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499813} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.770344] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad/4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1707.770612] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1707.770867] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1dbc21f0-cd04-46e7-bff1-762d8ebf5a69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.777593] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1707.777593] env[62820]: value = "task-1696116" [ 1707.777593] env[62820]: _type = "Task" [ 1707.777593] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.786970] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696116, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.875842] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696110, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.994064] env[62820]: INFO nova.compute.resource_tracker [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating resource usage from migration 88e49372-0e6b-4197-a1c5-095a6ee63d37 [ 1708.066353] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.071552] env[62820]: DEBUG nova.network.neutron [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Updated VIF entry in instance network info cache for port b2ce5106-4423-4570-ac4f-58a8fef758e7. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.071893] env[62820]: DEBUG nova.network.neutron [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Updating instance_info_cache with network_info: [{"id": "b2ce5106-4423-4570-ac4f-58a8fef758e7", "address": "fa:16:3e:66:04:d0", "network": {"id": "1b14e126-1bde-4cfc-8919-bb2e37a1b9b7", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-323699130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2b10e16241b4d2ab29b164d08e0653d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ce5106-44", "ovs_interfaceid": "b2ce5106-4423-4570-ac4f-58a8fef758e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.172161] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5279c2e9-b9d2-c3ee-ed42-73ee8933ba95, 'name': SearchDatastore_Task, 'duration_secs': 0.056513} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.178902] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.178902] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1708.178902] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.178902] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.178902] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1708.178902] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46aa180a-ced3-4361-9e46-280cbd1f754b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.189237] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1708.189587] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1708.193250] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ebcb7a8-9bb2-4249-b4c9-0c3f1b670ad6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.199035] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1708.199035] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52464d50-33df-2834-d4ba-de25a67f4273" [ 1708.199035] env[62820]: _type = "Task" [ 1708.199035] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.207012] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52464d50-33df-2834-d4ba-de25a67f4273, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.262426] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696114, 'name': ReconfigVM_Task, 'duration_secs': 0.767333} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.264986] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369/9acf0d8f-2daa-4c3a-9ac0-a1be12e56369.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1708.265841] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92a9d3be-6a2d-408f-ba3a-27a7dd1c5f85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.272207] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1708.272207] env[62820]: value = "task-1696117" [ 1708.272207] env[62820]: _type = "Task" [ 1708.272207] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.276399] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0766c946-84b5-4e4c-a3a7-3a4c1704f400 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.284755] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696117, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.287559] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1305d738-ae8a-403c-977a-90848c491e78 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.293764] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073523} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.294343] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1708.295118] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bf7a27-1841-4d71-9c3f-1b26587d3d91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.324063] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68ccf7d-8bb9-47ea-adf3-7d6418ef65d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.344815] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad/4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1708.345937] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a7b4530-6b5a-4f86-a402-47e8dd20fe13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.364455] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c746ba-3bd6-4412-9702-1208db13a338 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.369680] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1708.369680] env[62820]: value = "task-1696118" [ 1708.369680] env[62820]: _type = "Task" [ 1708.369680] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.385451] env[62820]: DEBUG nova.compute.provider_tree [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.392431] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696110, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.395971] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.516314] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.566054] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.574830] env[62820]: DEBUG oslo_concurrency.lockutils [req-8b5d5daa-932a-4d39-9235-70718c8f0079 req-84f7859b-7b48-4a9a-b692-94b93cd9b90e service nova] Releasing lock "refresh_cache-46217ada-3fab-4dbc-a65e-a3b8e856918d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.710242] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52464d50-33df-2834-d4ba-de25a67f4273, 'name': SearchDatastore_Task, 'duration_secs': 0.048551} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.710976] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f4588b1-ad6f-425f-aa6f-e1a76c0d9987 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.716301] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1708.716301] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52daba29-d4ed-71e3-fc60-459c4a978ffc" [ 1708.716301] env[62820]: _type = "Task" [ 1708.716301] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.723945] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52daba29-d4ed-71e3-fc60-459c4a978ffc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.782349] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696117, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.883738] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696110, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.887191] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.888303] env[62820]: DEBUG nova.scheduler.client.report [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1709.067676] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.226934] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52daba29-d4ed-71e3-fc60-459c4a978ffc, 'name': SearchDatastore_Task, 'duration_secs': 0.010601} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.227388] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.227663] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 46217ada-3fab-4dbc-a65e-a3b8e856918d/46217ada-3fab-4dbc-a65e-a3b8e856918d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1709.228362] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5048fb9-1827-475c-aa38-702ea5c26636 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.234907] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1709.234907] env[62820]: value = "task-1696119" [ 1709.234907] env[62820]: _type = "Task" [ 1709.234907] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.243400] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696119, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.282618] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696117, 'name': Rename_Task, 'duration_secs': 0.580969} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.283220] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1709.283454] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f32ca29-a48f-44f4-829a-7e2f06c4d443 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.290459] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1709.290459] env[62820]: value = "task-1696120" [ 1709.290459] env[62820]: _type = "Task" [ 1709.290459] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.308226] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.381127] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696110, 'name': CloneVM_Task, 'duration_secs': 2.271026} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.381739] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Created linked-clone VM from snapshot [ 1709.382534] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f5252a-f9be-4576-b2c7-3df039950ae2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.388325] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696118, 'name': ReconfigVM_Task, 'duration_secs': 0.68401} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.388908] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad/4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.389577] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f642780-c9d8-484d-9fbf-bee08f34d08b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.394250] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.415s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.394438] env[62820]: INFO nova.compute.manager [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Migrating [ 1709.400887] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Uploading image 8cd3af99-9ba0-4075-b2da-e4bba65abf52 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1709.406424] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.890s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.406659] env[62820]: DEBUG nova.objects.instance [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'pci_requests' on Instance uuid e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1709.417209] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1709.417209] env[62820]: value = "task-1696121" [ 1709.417209] env[62820]: _type = "Task" [ 1709.417209] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.430985] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696121, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.432959] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1709.432959] env[62820]: value = "vm-353636" [ 1709.432959] env[62820]: _type = "VirtualMachine" [ 1709.432959] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1709.433440] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1283a664-3f0a-413c-8c2a-01994ef507af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.441823] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease: (returnval){ [ 1709.441823] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bfd767-923c-7598-de56-0703a696f411" [ 1709.441823] env[62820]: _type = "HttpNfcLease" [ 1709.441823] env[62820]: } obtained for exporting VM: (result){ [ 1709.441823] env[62820]: value = "vm-353636" [ 1709.441823] env[62820]: _type = "VirtualMachine" [ 1709.441823] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1709.442242] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the lease: (returnval){ [ 1709.442242] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bfd767-923c-7598-de56-0703a696f411" [ 1709.442242] env[62820]: _type = "HttpNfcLease" [ 1709.442242] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1709.450529] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1709.450529] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bfd767-923c-7598-de56-0703a696f411" [ 1709.450529] env[62820]: _type = "HttpNfcLease" [ 1709.450529] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1709.569604] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.746520] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696119, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.801287] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696120, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.918614] env[62820]: DEBUG nova.objects.instance [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'numa_topology' on Instance uuid e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1709.920941] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.921167] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.921382] env[62820]: DEBUG nova.network.neutron [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1709.935021] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696121, 'name': Rename_Task, 'duration_secs': 0.17134} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.935320] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1709.936122] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f68f352c-a427-42d0-9005-1b47dcffeb9d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.943935] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1709.943935] env[62820]: value = "task-1696123" [ 1709.943935] env[62820]: _type = "Task" [ 1709.943935] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.954672] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1709.954672] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bfd767-923c-7598-de56-0703a696f411" [ 1709.954672] env[62820]: _type = "HttpNfcLease" [ 1709.954672] env[62820]: } is ready. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1709.957661] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1709.957661] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52bfd767-923c-7598-de56-0703a696f411" [ 1709.957661] env[62820]: _type = "HttpNfcLease" [ 1709.957661] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1709.958278] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696123, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.958985] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a68771b-fe13-49d6-902a-883ce89bbc92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.967136] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc3c2c-1eff-0773-dbd7-56f7775fe8b8/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1709.967325] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc3c2c-1eff-0773-dbd7-56f7775fe8b8/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1710.069012] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.099488] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fab060b3-46fa-4cc9-8978-fc50bf3c8d22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.247610] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696119, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68168} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.247936] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 46217ada-3fab-4dbc-a65e-a3b8e856918d/46217ada-3fab-4dbc-a65e-a3b8e856918d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1710.248757] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1710.248757] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6671b014-d63d-4256-a8d2-17b2ca4b7b05 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.257782] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1710.257782] env[62820]: value = "task-1696124" [ 1710.257782] env[62820]: _type = "Task" [ 1710.257782] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.269548] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696124, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.303261] env[62820]: DEBUG oslo_vmware.api [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696120, 'name': PowerOnVM_Task, 'duration_secs': 0.76613} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.303561] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1710.303780] env[62820]: INFO nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Took 10.41 seconds to spawn the instance on the hypervisor. 
[ 1710.303976] env[62820]: DEBUG nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1710.304837] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af552658-d2f7-4e45-b82d-41d932ff266b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.421600] env[62820]: INFO nova.compute.claims [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1710.459258] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696123, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.569735] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.769738] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696124, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075529} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.774010] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1710.774967] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03bebb7-aa9c-4642-b202-64df431e56d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.799610] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 46217ada-3fab-4dbc-a65e-a3b8e856918d/46217ada-3fab-4dbc-a65e-a3b8e856918d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1710.800949] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24b0b0ca-a81e-4bf9-8d42-3457f664b1ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.827824] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1710.827824] env[62820]: value = "task-1696125" [ 1710.827824] env[62820]: _type = "Task" [ 1710.827824] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.833585] env[62820]: INFO nova.compute.manager [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Took 20.85 seconds to build instance. [ 1710.842687] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696125, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.940094] env[62820]: DEBUG nova.network.neutron [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance_info_cache with network_info: [{"id": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "address": "fa:16:3e:a8:e9:1b", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8ab641-c9", "ovs_interfaceid": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.959747] env[62820]: DEBUG oslo_vmware.api [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696123, 'name': PowerOnVM_Task, 'duration_secs': 0.83871} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.960194] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1710.960561] env[62820]: INFO nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Took 8.58 seconds to spawn the instance on the hypervisor. 
[ 1710.960762] env[62820]: DEBUG nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1710.961617] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7fe20a-aa8c-40bf-841e-b18b4eb2f69a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.072405] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.335738] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1dd1d950-0abd-4227-a17c-e83b52b5835e tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.374s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.342832] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696125, 'name': ReconfigVM_Task, 'duration_secs': 0.284362} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.343355] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 46217ada-3fab-4dbc-a65e-a3b8e856918d/46217ada-3fab-4dbc-a65e-a3b8e856918d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1711.344152] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddb640a2-2245-4728-b15a-f718e6043c33 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.352687] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1711.352687] env[62820]: value = "task-1696126" [ 1711.352687] env[62820]: _type = "Task" [ 1711.352687] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.361591] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696126, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.442775] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.481817] env[62820]: INFO nova.compute.manager [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Took 14.58 seconds to build instance. [ 1711.573353] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.670815] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473e856b-f5f1-4ab6-9b35-54bd0f8084ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.680345] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6013528-30ad-43a0-94ff-351b0274bd99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.714689] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afd890a-e97a-487c-9683-6da1085bb7cc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.723274] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c4984c-6a5d-4ef6-a83e-15afc6048135 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.737440] env[62820]: DEBUG nova.compute.provider_tree [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1711.868744] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696126, 'name': Rename_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.985207] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7374ad70-a7c9-4e48-95e0-a64d9a86d48e tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.102s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.071231] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.144076] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.144504] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.144620] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.144763] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.144951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.147500] env[62820]: INFO nova.compute.manager [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Terminating instance [ 1712.240865] env[62820]: DEBUG nova.scheduler.client.report [None 
req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1712.364344] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696126, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.372583] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148ee432-0fe6-4a9c-b625-cea6799349cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.381190] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Suspending the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1712.381491] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-388bcd9a-4816-4662-9076-d531ebb51ccd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.388758] env[62820]: DEBUG oslo_vmware.api [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1712.388758] env[62820]: value = "task-1696127" [ 1712.388758] env[62820]: _type = "Task" [ 1712.388758] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.397326] env[62820]: DEBUG oslo_vmware.api [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696127, 'name': SuspendVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.571132] env[62820]: DEBUG oslo_vmware.api [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696107, 'name': ReconfigVM_Task, 'duration_secs': 6.874081} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.571529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.571793] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Reconfigured VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1712.651804] env[62820]: DEBUG nova.compute.manager [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1712.652041] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1712.652961] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd63b156-0972-4967-9119-97c28136577e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.661123] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.661386] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0078d79f-7025-46ee-9c77-d8bdffd36d61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.669119] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1712.669119] env[62820]: value = "task-1696128" [ 1712.669119] env[62820]: _type = "Task" [ 1712.669119] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.678524] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696128, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.746264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.340s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.781640] env[62820]: INFO nova.network.neutron [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1712.843912] env[62820]: DEBUG nova.compute.manager [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-vif-deleted-f0084819-f55d-4bd8-a480-72eab0bdd647 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1712.844192] env[62820]: INFO nova.compute.manager [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Neutron deleted interface f0084819-f55d-4bd8-a480-72eab0bdd647; detaching it from the instance and deleting it from the info cache [ 1712.844828] env[62820]: DEBUG nova.network.neutron [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8753570b-f8cd-4945-9a31-822b01c0c867", "address": "fa:16:3e:9d:1a:02", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8753570b-f8", "ovs_interfaceid": "8753570b-f8cd-4945-9a31-822b01c0c867", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.872035] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696126, 'name': Rename_Task, 'duration_secs': 1.131639} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.872035] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1712.872035] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4af5d4f5-20b9-478c-a32a-a054367138f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.880714] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1712.880714] env[62820]: value = "task-1696129" [ 1712.880714] env[62820]: _type = "Task" [ 1712.880714] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.894062] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696129, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.907774] env[62820]: DEBUG oslo_vmware.api [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696127, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.962018] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbb135f-f895-4d38-ae65-7b5b38cba896 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.986147] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1713.180949] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696128, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.349072] env[62820]: DEBUG oslo_concurrency.lockutils [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.352047] env[62820]: DEBUG oslo_concurrency.lockutils [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] Acquired lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.352047] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93e0745-4911-463c-a4fe-37f089b47c1d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.376789] env[62820]: DEBUG oslo_concurrency.lockutils [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] Releasing lock "11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.376789] env[62820]: WARNING nova.compute.manager [req-dae7f51f-86b7-48be-954c-6ce91a7ecce5 req-37388649-3293-4985-8fde-e7b43bf5c861 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Detach interface failed, port_id=f0084819-f55d-4bd8-a480-72eab0bdd647, reason: No device with interface-id f0084819-f55d-4bd8-a480-72eab0bdd647 exists on VM: nova.exception.NotFound: No device with interface-id f0084819-f55d-4bd8-a480-72eab0bdd647 exists on VM [ 1713.394531] env[62820]: DEBUG oslo_vmware.api [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696129, 'name': PowerOnVM_Task, 'duration_secs': 0.487429} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.394531] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1713.398019] env[62820]: INFO nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Took 8.64 seconds to spawn the instance on the hypervisor. [ 1713.398019] env[62820]: DEBUG nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1713.398019] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9695411d-4d99-4849-b29f-44ef7c4ce913 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.413742] env[62820]: DEBUG oslo_vmware.api [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696127, 'name': SuspendVM_Task, 'duration_secs': 0.852133} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.414295] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Suspended the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1713.414499] env[62820]: DEBUG nova.compute.manager [None req-50c28c40-1ce5-47d1-b20f-03722e3eb47c tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1713.415297] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dec0920-7e7f-4fd4-9341-ea164cfa865d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.492905] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1713.493248] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cc9baa4-4e88-4a71-b2de-3c148c555533 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.502977] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d 
tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1713.502977] env[62820]: value = "task-1696130" [ 1713.502977] env[62820]: _type = "Task" [ 1713.502977] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.512668] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696130, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.685023] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696128, 'name': PowerOffVM_Task, 'duration_secs': 0.56613} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.685023] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.685023] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.685023] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff3dd39a-c07f-4f91-8abb-ff2ca4dc28f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.771230] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1713.771507] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1713.771773] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1713.772363] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00d7564b-e53f-4c5f-83ed-0a609e50ad79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.781760] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 
tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1713.781760] env[62820]: value = "task-1696132" [ 1713.781760] env[62820]: _type = "Task" [ 1713.781760] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.791928] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.810850] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.810964] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.811087] env[62820]: DEBUG nova.network.neutron [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1713.923771] env[62820]: INFO nova.compute.manager [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Took 17.02 seconds to build instance. [ 1714.014354] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696130, 'name': PowerOffVM_Task, 'duration_secs': 0.253235} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.014759] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1714.014948] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1714.292078] env[62820]: DEBUG oslo_vmware.api [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212086} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.292381] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.292567] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.292753] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.292966] env[62820]: INFO nova.compute.manager [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1714.293260] env[62820]: DEBUG oslo.service.loopingcall [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.293709] env[62820]: DEBUG nova.compute.manager [-] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1714.293709] env[62820]: DEBUG nova.network.neutron [-] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1714.299980] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "46217ada-3fab-4dbc-a65e-a3b8e856918d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.356590] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.358135] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.358490] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.358781] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.358971] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.362825] env[62820]: INFO nova.compute.manager [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] 
[instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Terminating instance [ 1714.426236] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9e1b776f-090e-4d54-a6a4-a48bd18ef96a tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.536s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.426772] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.127s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.427326] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "46217ada-3fab-4dbc-a65e-a3b8e856918d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.427555] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.427728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.429955] env[62820]: INFO nova.compute.manager [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Terminating instance [ 1714.521778] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1714.522122] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1714.522662] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1714.522662] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1714.522662] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1714.522825] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1714.522993] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1714.524507] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1714.524507] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1714.524507] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1714.524507] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1714.531951] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89efb664-9e3c-4712-ba67-011b8c52d653 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.554913] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1714.554913] env[62820]: value = "task-1696133" [ 1714.554913] env[62820]: _type = "Task" [ 1714.554913] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.564763] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696133, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.709790] env[62820]: DEBUG nova.compute.manager [req-c3c2f48f-4f8d-45f4-9cd4-cf891fcfb548 req-fcd3cc94-dfa0-46e1-8880-5a8109c18e08 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-vif-plugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1714.710034] env[62820]: DEBUG oslo_concurrency.lockutils [req-c3c2f48f-4f8d-45f4-9cd4-cf891fcfb548 req-fcd3cc94-dfa0-46e1-8880-5a8109c18e08 service nova] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.710260] env[62820]: DEBUG oslo_concurrency.lockutils [req-c3c2f48f-4f8d-45f4-9cd4-cf891fcfb548 req-fcd3cc94-dfa0-46e1-8880-5a8109c18e08 service nova] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.710440] env[62820]: DEBUG oslo_concurrency.lockutils [req-c3c2f48f-4f8d-45f4-9cd4-cf891fcfb548 req-fcd3cc94-dfa0-46e1-8880-5a8109c18e08 service nova] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.710673] env[62820]: DEBUG nova.compute.manager [req-c3c2f48f-4f8d-45f4-9cd4-cf891fcfb548 req-fcd3cc94-dfa0-46e1-8880-5a8109c18e08 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] No waiting events found dispatching network-vif-plugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1714.710778] env[62820]: WARNING nova.compute.manager [req-c3c2f48f-4f8d-45f4-9cd4-cf891fcfb548 req-fcd3cc94-dfa0-46e1-8880-5a8109c18e08 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received unexpected event 
network-vif-plugged-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 for instance with vm_state shelved_offloaded and task_state spawning. [ 1714.720534] env[62820]: INFO nova.network.neutron [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Port 8753570b-f8cd-4945-9a31-822b01c0c867 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1714.720886] env[62820]: DEBUG nova.network.neutron [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [{"id": "e8df5c6d-470d-4740-947e-1652ee33a75f", "address": "fa:16:3e:1d:ba:da", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8df5c6d-47", "ovs_interfaceid": "e8df5c6d-470d-4740-947e-1652ee33a75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.815631] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.815752] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.815931] env[62820]: DEBUG nova.network.neutron [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1714.869595] env[62820]: DEBUG nova.compute.manager [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 
11843b38-3ce4-42a7-b855-a9d0b473e796] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1714.870311] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1714.871775] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faf6b48-f15a-4867-8fad-cdb18c817b40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.882474] env[62820]: DEBUG nova.compute.manager [req-50d7ccd5-8dd1-439d-82cb-172bfb3dcbf2 req-65b257c4-9f90-43c1-856a-13e78af4db0e service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-vif-deleted-8753570b-f8cd-4945-9a31-822b01c0c867 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1714.882831] env[62820]: DEBUG nova.compute.manager [req-50d7ccd5-8dd1-439d-82cb-172bfb3dcbf2 req-65b257c4-9f90-43c1-856a-13e78af4db0e service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Received event network-vif-deleted-34e8df14-7464-4d07-81cb-333b36342136 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1714.883134] env[62820]: INFO nova.compute.manager [req-50d7ccd5-8dd1-439d-82cb-172bfb3dcbf2 req-65b257c4-9f90-43c1-856a-13e78af4db0e service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Neutron deleted interface 34e8df14-7464-4d07-81cb-333b36342136; detaching it from the instance and deleting it from the info cache [ 1714.883421] env[62820]: DEBUG nova.network.neutron [req-50d7ccd5-8dd1-439d-82cb-172bfb3dcbf2 req-65b257c4-9f90-43c1-856a-13e78af4db0e service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.891924] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1714.892306] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceef7908-24a8-433c-935c-78ffb485fa01 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.898306] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.898664] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.898987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.899325] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.899617] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.903741] env[62820]: DEBUG oslo_vmware.api [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1714.903741] env[62820]: value = "task-1696134" [ 1714.903741] env[62820]: _type = "Task" [ 1714.903741] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.904340] env[62820]: INFO nova.compute.manager [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Terminating instance [ 1714.921180] env[62820]: DEBUG oslo_vmware.api [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696134, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.937176] env[62820]: DEBUG nova.compute.manager [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1714.937176] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1714.938533] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537e5608-08d2-4518-b7a4-09e3d6505950 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.950283] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1714.950690] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23150ce6-779b-4f66-ad69-09c144ae4e60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.962992] env[62820]: DEBUG oslo_vmware.api [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1714.962992] env[62820]: value = "task-1696135" [ 1714.962992] env[62820]: _type = "Task" [ 1714.962992] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.975812] env[62820]: DEBUG oslo_vmware.api [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.066497] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696133, 'name': ReconfigVM_Task, 'duration_secs': 0.38958} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.066851] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1715.070553] env[62820]: DEBUG nova.network.neutron [-] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.223218] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-11843b38-3ce4-42a7-b855-a9d0b473e796" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.389064] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2381a9ba-0f4a-4f1c-80ba-8da494fe1c39 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.399722] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caae3e57-ade8-4939-9c23-090d915f7b1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.416316] env[62820]: DEBUG nova.compute.manager [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1715.416569] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1715.417352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8303a0-f208-4750-8ed8-88b964ebff5c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.429462] env[62820]: DEBUG oslo_vmware.api [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696134, 'name': PowerOffVM_Task, 'duration_secs': 0.230133} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.429731] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1715.443990] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1715.443990] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1715.444180] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5b186dc-b303-4646-8a0e-9287f7f88848 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.445891] env[62820]: DEBUG nova.compute.manager [req-50d7ccd5-8dd1-439d-82cb-172bfb3dcbf2 req-65b257c4-9f90-43c1-856a-13e78af4db0e service nova] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Detach interface failed, port_id=34e8df14-7464-4d07-81cb-333b36342136, reason: Instance 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1715.448536] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1475ff02-3439-4ddf-b749-c3c36ed58297 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.480893] env[62820]: DEBUG oslo_vmware.api [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696135, 'name': PowerOffVM_Task, 'duration_secs': 0.263048} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.481193] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1715.481371] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1715.481702] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69880f51-5881-4402-a10c-fecd4b749e6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.574417] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1715.578076] env[62820]: DEBUG nova.virt.hardware [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1715.583547] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1715.583854] env[62820]: INFO nova.compute.manager [-] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Took 1.29 seconds to deallocate network for instance. 
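The nova.virt.hardware records above show the CPU-topology selection for the m1.nano flavor: flavor and image set no limits or preferences (0:0:0), the maximum defaults to 65536 sockets/cores/threads, and for 1 vCPU the only candidate is sockets=1, cores=1, threads=1. The sketch below is a simplified, illustrative reconstruction of that enumeration step, not the actual nova.virt.hardware code; the Topology dataclass and helper names are assumptions made for the example.

from dataclasses import dataclass

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate sockets*cores*threads splits that exactly cover `vcpus`,
    mirroring the 'Build topologies ... Got N possible topologies' records."""
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            threads, rem = divmod(vcpus, sockets * cores)
            if rem == 0 and 1 <= threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

def sort_by_preference(topologies, want_sockets=0, want_cores=0, want_threads=0):
    """Order candidates so those matching the preferred values come first;
    a preference of 0 means 'no preference', as in the 'Flavor pref 0:0:0' record."""
    def score(t):
        return ((t.sockets == want_sockets)
                + (t.cores == want_cores)
                + (t.threads == want_threads))
    return sorted(topologies, key=score, reverse=True)

if __name__ == "__main__":
    # m1.nano from the log: vcpus=1, no limits, no preferences.
    print(sort_by_preference(possible_topologies(vcpus=1)))
    # [Topology(sockets=1, cores=1, threads=1)]

For this flavor the result matches the 'Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]' record above.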
[ 1715.585297] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e808fe3a-cb23-42be-999a-2f14963c0a95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.602033] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.602263] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.602375] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleting the datastore file [datastore1] 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.604044] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9ae3e0b-be5c-47ed-88d0-80180f9b1d74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.610496] env[62820]: DEBUG nova.network.neutron [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.614609] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Unregistered the VM {{(pid=62820) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.614871] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.615351] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Deleting the datastore file [datastore1] 46217ada-3fab-4dbc-a65e-a3b8e856918d {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.615818] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1715.615818] env[62820]: value = "task-1696139" [ 1715.615818] env[62820]: _type = "Task" [ 1715.615818] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.616497] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93967539-040c-4d98-80be-06606cb41d27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.624254] env[62820]: DEBUG oslo_vmware.api [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1715.624254] env[62820]: value = "task-1696140" [ 1715.624254] env[62820]: _type = "Task" [ 1715.624254] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.634474] env[62820]: DEBUG oslo_vmware.api [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for the task: (returnval){ [ 1715.634474] env[62820]: value = "task-1696141" [ 1715.634474] env[62820]: _type = "Task" [ 1715.634474] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.634723] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696139, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.643866] env[62820]: DEBUG oslo_vmware.api [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696140, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.650279] env[62820]: DEBUG oslo_vmware.api [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.727410] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0178226d-3d73-4916-831c-9b66829555c6 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-11843b38-3ce4-42a7-b855-a9d0b473e796-f0084819-f55d-4bd8-a480-72eab0bdd647" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.756s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.757249] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.757337] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.757491] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleting the datastore file [datastore1] 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.757835] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80bab15e-1177-434c-9b1b-1d02117feb47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.767315] env[62820]: DEBUG oslo_vmware.api [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1715.767315] env[62820]: value = "task-1696142" [ 1715.767315] env[62820]: _type = "Task" [ 1715.767315] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.776239] env[62820]: DEBUG oslo_vmware.api [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.116892] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.120162] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.121353] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.121353] env[62820]: DEBUG nova.objects.instance [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1716.137403] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696139, 'name': ReconfigVM_Task, 'duration_secs': 0.273933} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.144219] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1716.144918] env[62820]: DEBUG oslo_vmware.api [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231302} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.147903] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae66ea9a-8628-4af1-9dc1-50727227e969 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.151976] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.152204] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.152382] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.152556] env[62820]: INFO nova.compute.manager [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Took 0.74 seconds to destroy the instance on the hypervisor. [ 1716.152799] env[62820]: DEBUG oslo.service.loopingcall [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.153665] env[62820]: DEBUG nova.compute.manager [-] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1716.153780] env[62820]: DEBUG nova.network.neutron [-] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.160474] env[62820]: DEBUG oslo_vmware.api [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Task: {'id': task-1696141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233114} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.174328] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='199427852772e4f250193a920288d5a4',container_format='bare',created_at=2024-12-10T16:54:16Z,direct_url=,disk_format='vmdk',id=1ad372de-b4a3-441d-b9c8-61354d703fed,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1244157950-shelved',owner='14788b1c55684c2fbd3c07bff18757f9',properties=ImageMetaProps,protected=,size=31590400,status='active',tags=,updated_at=2024-12-10T16:54:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1716.174593] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1716.174913] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1716.175191] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1716.175383] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1716.175599] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1716.175833] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1716.175994] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1716.176183] 
env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1716.176376] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1716.176565] env[62820]: DEBUG nova.virt.hardware [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1716.176903] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.177113] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.177364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.177469] env[62820]: INFO nova.compute.manager [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1716.177734] env[62820]: DEBUG oslo.service.loopingcall [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.185235] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274/860637a2-8c59-42af-a9f5-4e80c5466274.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1716.187607] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37a1909-0b14-4036-abc1-9fc9943d4c3a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.190151] env[62820]: DEBUG nova.compute.manager [-] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1716.190273] env[62820]: DEBUG nova.network.neutron [-] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.194921] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fce80c6a-b215-4a52-adec-eea652be4c30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.214520] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257a5770-9612-4838-a447-111f36b3a91a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.220385] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1716.220385] env[62820]: value = "task-1696143" [ 1716.220385] env[62820]: _type = "Task" [ 1716.220385] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.233307] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:27:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c81ac6d-fc1a-4519-81f6-1a3a523acee9', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1716.241704] env[62820]: DEBUG oslo.service.loopingcall [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.242496] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1716.242765] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3eb705e-1c29-401c-b0bb-9f5bb101f74a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.261395] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696143, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.267445] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1716.267445] env[62820]: value = "task-1696144" [ 1716.267445] env[62820]: _type = "Task" [ 1716.267445] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.284497] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696144, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.285168] env[62820]: DEBUG oslo_vmware.api [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251254} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.285423] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.285593] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.285770] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.285944] env[62820]: INFO nova.compute.manager [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Took 1.42 seconds to destroy the instance on the hypervisor. 
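Nearly every vCenter operation in this section follows the same shape: a task object is returned (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ReconfigVM_Task, CreateVM_Task), the driver logs 'Waiting for the task ... to complete', polls progress ('progress is 0%' ... '99%'), and finally records 'completed successfully' with a duration_secs. Below is a minimal sketch of such a poll loop; TaskInfo and get_task_info are hypothetical stand-ins for a real client round-trip, not the oslo.vmware API itself.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    """Hypothetical snapshot of a remote task's state."""
    state: str        # 'queued' | 'running' | 'success' | 'error'
    progress: int     # 0-100
    error: str = ""

def get_task_info(task_ref) -> TaskInfo:
    """Placeholder for the server round-trip that reads the task's current state."""
    raise NotImplementedError("wire this up to a real client")

def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a task until it finishes, logging progress the way the records above do."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task_ref} completed successfully, duration_secs={duration:.6f}")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress}%.")
        time.sleep(poll_interval)

The 'Waiting for function ... to return' records around the network deallocation and VM creation reflect the same polling idea applied to retryable Python functions (oslo.service's looping-call machinery) rather than vCenter tasks, which is why they bracket _deallocate_network_with_retries and create_vm above.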
[ 1716.286222] env[62820]: DEBUG oslo.service.loopingcall [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.288618] env[62820]: DEBUG nova.compute.manager [-] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1716.288727] env[62820]: DEBUG nova.network.neutron [-] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.376281] env[62820]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 8753570b-f8cd-4945-9a31-822b01c0c867 could not be found.", "detail": ""}} {{(pid=62820) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1716.376281] env[62820]: DEBUG nova.network.neutron [-] Unable to show port 8753570b-f8cd-4945-9a31-822b01c0c867 as it no longer exists. {{(pid=62820) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1716.608411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "7a755ef6-67bc-4242-9343-c54c8566adf8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.608795] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.608986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "7a755ef6-67bc-4242-9343-c54c8566adf8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.609240] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.609444] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.611672] env[62820]: INFO nova.compute.manager [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Terminating instance [ 1716.736802] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696143, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.772608] env[62820]: DEBUG nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-changed-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1716.772608] env[62820]: DEBUG nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Refreshing instance network info cache due to event network-changed-8c81ac6d-fc1a-4519-81f6-1a3a523acee9. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1716.772608] env[62820]: DEBUG oslo_concurrency.lockutils [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.772608] env[62820]: DEBUG oslo_concurrency.lockutils [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.772608] env[62820]: DEBUG nova.network.neutron [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Refreshing network info cache for port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1716.782228] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696144, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.943259] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b3b129-b932-4c67-a33f-b4a9f3bed29d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.954659] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c7f1ad-3708-4ab5-a970-e0aa362b5b8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.995269] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee90d97d-b3d6-43af-b24a-9ada9f07d223 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.999223] env[62820]: DEBUG nova.compute.manager [req-56739afa-8a68-47c5-a9a5-09a8443e8880 req-3132412f-057d-4c95-b95d-0ccadb9af131 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Received event network-vif-deleted-e8df5c6d-470d-4740-947e-1652ee33a75f {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1716.999422] env[62820]: INFO nova.compute.manager [req-56739afa-8a68-47c5-a9a5-09a8443e8880 req-3132412f-057d-4c95-b95d-0ccadb9af131 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Neutron deleted interface e8df5c6d-470d-4740-947e-1652ee33a75f; detaching it from the instance and deleting it from the info cache [ 1716.999599] env[62820]: DEBUG nova.network.neutron [req-56739afa-8a68-47c5-a9a5-09a8443e8880 req-3132412f-057d-4c95-b95d-0ccadb9af131 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.008252] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1db58a-e6eb-482f-abf8-34a8452697d1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.026294] env[62820]: DEBUG nova.compute.provider_tree [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1717.116552] env[62820]: DEBUG nova.compute.manager [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1717.116790] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1717.117688] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64976f3-73c0-4f63-b744-478195d7c066 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.126526] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1717.128114] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb9ed908-2372-4869-aa9b-5ba77b361c8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.133589] env[62820]: DEBUG nova.network.neutron [-] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.135466] env[62820]: DEBUG oslo_vmware.api [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1717.135466] env[62820]: value = "task-1696145" [ 1717.135466] env[62820]: _type = "Task" [ 1717.135466] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.141081] env[62820]: DEBUG nova.network.neutron [-] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.148174] env[62820]: DEBUG oslo_vmware.api [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1696145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.231625] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696143, 'name': ReconfigVM_Task, 'duration_secs': 0.526901} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.231821] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274/860637a2-8c59-42af-a9f5-4e80c5466274.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1717.232113] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1717.280098] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696144, 'name': CreateVM_Task, 'duration_secs': 0.629802} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.280309] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1717.281143] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.281374] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.281816] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1717.282144] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3923040-ad02-48db-a2a6-3ce8a779665a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.289320] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1717.289320] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528439e4-519e-1be2-b208-c8475ee8fe25" [ 1717.289320] env[62820]: _type = "Task" [ 1717.289320] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.301010] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528439e4-519e-1be2-b208-c8475ee8fe25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.448583] env[62820]: DEBUG nova.network.neutron [-] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.502675] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4bfcfee-27c1-4d04-b2a8-fa8c3effd393 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.513383] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd26d99a-d0f0-4296-bf18-6b93c31c9514 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.529736] env[62820]: DEBUG nova.scheduler.client.report [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1717.537766] env[62820]: DEBUG nova.network.neutron [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updated VIF entry in instance network info cache for port 8c81ac6d-fc1a-4519-81f6-1a3a523acee9. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1717.538128] env[62820]: DEBUG nova.network.neutron [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.554264] env[62820]: DEBUG nova.compute.manager [req-56739afa-8a68-47c5-a9a5-09a8443e8880 req-3132412f-057d-4c95-b95d-0ccadb9af131 service nova] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Detach interface failed, port_id=e8df5c6d-470d-4740-947e-1652ee33a75f, reason: Instance 11843b38-3ce4-42a7-b855-a9d0b473e796 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1717.635875] env[62820]: INFO nova.compute.manager [-] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Took 1.45 seconds to deallocate network for instance. [ 1717.644525] env[62820]: INFO nova.compute.manager [-] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Took 1.49 seconds to deallocate network for instance. [ 1717.652399] env[62820]: DEBUG oslo_vmware.api [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1696145, 'name': PowerOffVM_Task, 'duration_secs': 0.237209} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.652935] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1717.653147] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1717.653421] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-182d0eaa-4f27-4e64-90e7-8c55647b68da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.735103] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1717.735360] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1717.735609] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Deleting the datastore file [datastore1] 7a755ef6-67bc-4242-9343-c54c8566adf8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1717.737679] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07bb58b5-2884-4dbc-85c1-01b722a4a611 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.741086] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cdd4da-b74d-403c-b58a-878009c27a00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.765843] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b203ac-415c-4715-97eb-f841c9a0033e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.768697] env[62820]: DEBUG oslo_vmware.api [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for the task: (returnval){ [ 1717.768697] env[62820]: value = "task-1696147" [ 1717.768697] env[62820]: _type = "Task" [ 1717.768697] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.785933] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1717.798705] env[62820]: DEBUG oslo_vmware.api [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1696147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.805358] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.805636] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Processing image 1ad372de-b4a3-441d-b9c8-61354d703fed {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1717.805897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.806084] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.806251] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1717.806661] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f23cb87f-5761-4b01-87b6-d020565cee55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.817079] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1717.818056] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1717.818056] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b66650d9-4f16-41ac-afb4-484f3b7cd74f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.825185] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1717.825185] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526cff6d-a4c2-1347-ccca-e1e953eb16a8" [ 1717.825185] env[62820]: _type = "Task" [ 1717.825185] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.836204] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526cff6d-a4c2-1347-ccca-e1e953eb16a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.951359] env[62820]: INFO nova.compute.manager [-] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Took 1.66 seconds to deallocate network for instance. [ 1718.037733] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.040961] env[62820]: DEBUG oslo_concurrency.lockutils [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.040961] env[62820]: DEBUG nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Received event network-vif-deleted-b2ce5106-4423-4570-ac4f-58a8fef758e7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1718.041132] env[62820]: INFO nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Neutron deleted interface b2ce5106-4423-4570-ac4f-58a8fef758e7; detaching it from the instance and deleting it from the info cache [ 1718.041357] env[62820]: DEBUG nova.network.neutron [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.063730] 
env[62820]: INFO nova.scheduler.client.report [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369 [ 1718.146517] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.146808] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.147033] env[62820]: DEBUG nova.objects.instance [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lazy-loading 'resources' on Instance uuid 46217ada-3fab-4dbc-a65e-a3b8e856918d {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1718.153784] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.279534] env[62820]: DEBUG oslo_vmware.api [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Task: {'id': task-1696147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23687} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.279770] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1718.279967] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1718.280162] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1718.280366] env[62820]: INFO nova.compute.manager [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1718.280630] env[62820]: DEBUG oslo.service.loopingcall [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1718.280836] env[62820]: DEBUG nova.compute.manager [-] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1718.280933] env[62820]: DEBUG nova.network.neutron [-] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1718.326519] env[62820]: DEBUG nova.network.neutron [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Port 2e8ab641-c961-452e-a6eb-d760374ac2b2 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1718.338888] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Preparing fetch location {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1718.339205] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Fetch image to [datastore1] OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c/OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c.vmdk {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1718.339396] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Downloading stream optimized image 1ad372de-b4a3-441d-b9c8-61354d703fed to [datastore1] OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c/OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c.vmdk on the data store datastore1 as vApp {{(pid=62820) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1718.339567] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Downloading image file data 1ad372de-b4a3-441d-b9c8-61354d703fed to the ESX as VM named 'OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c' {{(pid=62820) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1718.416513] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1718.416513] env[62820]: value = "resgroup-9" [ 1718.416513] env[62820]: _type = "ResourcePool" [ 1718.416513] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1718.416813] env[62820]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-57fc17a6-28ab-433e-b959-9b78d10e1725 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.439717] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lease: (returnval){ [ 1718.439717] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1718.439717] env[62820]: _type = "HttpNfcLease" [ 1718.439717] env[62820]: } obtained for vApp import into resource pool (val){ [ 1718.439717] env[62820]: value = "resgroup-9" [ 1718.439717] env[62820]: _type = "ResourcePool" [ 1718.439717] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1718.439971] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the lease: (returnval){ [ 1718.439971] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1718.439971] env[62820]: _type = "HttpNfcLease" [ 1718.439971] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1718.448486] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1718.448486] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1718.448486] env[62820]: _type = "HttpNfcLease" [ 1718.448486] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1718.461023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.544319] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29457fda-aba5-402a-b8ee-f351213b0f0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.556128] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16624f56-3aab-4e43-ae0a-17f9d6f5e7f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.573456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fdd2a77d-297e-42b8-aae6-4bd7e322a473 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "9acf0d8f-2daa-4c3a-9ac0-a1be12e56369" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.429s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.595035] env[62820]: DEBUG nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Detach interface failed, port_id=b2ce5106-4423-4570-ac4f-58a8fef758e7, reason: Instance 46217ada-3fab-4dbc-a65e-a3b8e856918d could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1718.595302] env[62820]: DEBUG nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Received event network-vif-deleted-af0b710a-f322-429b-9530-abac794409f9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1718.595485] env[62820]: INFO nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Neutron deleted interface af0b710a-f322-429b-9530-abac794409f9; detaching it from the instance and deleting it from the info cache [ 1718.595659] env[62820]: DEBUG nova.network.neutron [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.934298] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e9e3c6-ed2b-46d5-a443-b167ed64d026 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.950362] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd84d2f-3dd6-45ad-80c1-39526f13c100 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.960352] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1718.960352] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1718.960352] env[62820]: _type = "HttpNfcLease" [ 1718.960352] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1718.989029] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06846b35-4eca-4740-8a43-96853ac4de19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.997622] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bda3eb-be8e-43b8-9a7a-3c983491c02a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.012846] env[62820]: DEBUG nova.compute.provider_tree [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.086055] env[62820]: DEBUG nova.compute.manager [req-448c1065-01a5-45bc-8bee-967b59817bb1 req-bc3e57d0-9d7b-4c6b-b943-df9523b0f687 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Received event network-vif-deleted-0c860660-820e-425d-963c-906681be61b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1719.086482] env[62820]: INFO nova.compute.manager [req-448c1065-01a5-45bc-8bee-967b59817bb1 req-bc3e57d0-9d7b-4c6b-b943-df9523b0f687 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Neutron deleted interface 0c860660-820e-425d-963c-906681be61b6; detaching it from the instance and deleting it from the info cache [ 1719.086703] env[62820]: DEBUG nova.network.neutron [req-448c1065-01a5-45bc-8bee-967b59817bb1 req-bc3e57d0-9d7b-4c6b-b943-df9523b0f687 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.098442] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f566966-0457-40e3-a052-43c7244b4212 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.112137] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25095ce3-7fac-4591-ba98-909a74943c5f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.150500] env[62820]: DEBUG nova.compute.manager [req-3f6bda27-77f9-46ed-8af3-64d32a16c415 req-33eeabe4-8398-4491-a4c2-8a7eaedd5df1 service nova] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Detach interface failed, port_id=af0b710a-f322-429b-9530-abac794409f9, reason: Instance 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1719.274855] env[62820]: DEBUG nova.network.neutron [-] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.351039] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.351039] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.351039] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.454863] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1719.454863] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1719.454863] env[62820]: _type = "HttpNfcLease" [ 1719.454863] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1719.516463] env[62820]: DEBUG nova.scheduler.client.report [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1719.590656] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b9815ab-b029-4e63-b4dc-b34178190435 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.600832] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c0cd2e-fd60-4ce7-8ed9-1881134dd0f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.636504] env[62820]: DEBUG nova.compute.manager [req-448c1065-01a5-45bc-8bee-967b59817bb1 req-bc3e57d0-9d7b-4c6b-b943-df9523b0f687 service nova] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Detach interface failed, port_id=0c860660-820e-425d-963c-906681be61b6, reason: Instance 7a755ef6-67bc-4242-9343-c54c8566adf8 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1719.770156] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc3c2c-1eff-0773-dbd7-56f7775fe8b8/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1719.771113] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1f33a6-aa98-4e3f-bdf5-20e7e9f24192 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.777362] env[62820]: INFO nova.compute.manager [-] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Took 1.50 seconds to deallocate network for instance. [ 1719.782586] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc3c2c-1eff-0773-dbd7-56f7775fe8b8/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1719.782586] env[62820]: ERROR oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc3c2c-1eff-0773-dbd7-56f7775fe8b8/disk-0.vmdk due to incomplete transfer. 
[ 1719.786345] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ded441ee-903e-407e-9bfd-66d13cb64ce9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.797165] env[62820]: DEBUG oslo_vmware.rw_handles [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc3c2c-1eff-0773-dbd7-56f7775fe8b8/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1719.797383] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Uploaded image 8cd3af99-9ba0-4075-b2da-e4bba65abf52 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1719.799766] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1719.800268] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4490ec37-79b4-4006-9e15-8167d223d352 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.810073] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1719.810073] env[62820]: value = "task-1696149" [ 1719.810073] env[62820]: _type = "Task" [ 1719.810073] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.820634] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696149, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.955888] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1719.955888] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1719.955888] env[62820]: _type = "HttpNfcLease" [ 1719.955888] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1719.956207] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1719.956207] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524ce378-cd79-9dbc-fa59-aec6f2d1f875" [ 1719.956207] env[62820]: _type = "HttpNfcLease" [ 1719.956207] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1719.956765] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa9d27a-1cd4-4e4a-87bd-1c04f305f780 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.965032] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f32488-9015-1be3-eeef-31842c87855f/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1719.965226] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating HTTP connection to write to file with size = 31590400 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f32488-9015-1be3-eeef-31842c87855f/disk-0.vmdk. {{(pid=62820) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1720.023802] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.027136] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.873s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.027136] env[62820]: DEBUG nova.objects.instance [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'resources' on Instance uuid 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1720.033898] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-96fd02da-cb46-4b7f-bb5c-d5a877b883fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.049734] env[62820]: INFO nova.scheduler.client.report [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Deleted allocations for instance 46217ada-3fab-4dbc-a65e-a3b8e856918d [ 1720.279267] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.279576] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.279763] env[62820]: INFO nova.compute.manager [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Shelving [ 1720.291231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.322429] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696149, 'name': Destroy_Task} progress is 33%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.365053] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.365454] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.394974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.395175] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.395357] env[62820]: DEBUG nova.network.neutron [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Building network info cache for 
instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1720.559580] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1c2cec12-f3db-41b3-b3b8-70cbe8520aa1 tempest-InstanceActionsNegativeTestJSON-1971675410 tempest-InstanceActionsNegativeTestJSON-1971675410-project-member] Lock "46217ada-3fab-4dbc-a65e-a3b8e856918d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.133s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.826372] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696149, 'name': Destroy_Task, 'duration_secs': 0.798272} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.826655] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroyed the VM [ 1720.826889] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1720.827167] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-03608efa-7b39-412f-991d-15bf0a008ccf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.836465] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1720.836465] env[62820]: value = "task-1696150" [ 1720.836465] env[62820]: _type = "Task" [ 1720.836465] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.847463] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05cc7efb-1639-4d03-a9f8-4be683bebd2e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.857144] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696150, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.865682] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bd862c-3d35-4579-84c8-c293f658a9ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.872022] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1720.908024] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc09535-ff0d-4d26-856b-bfb2ae1d62a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.918894] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ddd5c56-2dee-47b7-9428-252bede3546e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.936967] env[62820]: DEBUG nova.compute.provider_tree [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.291992] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1721.292336] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a59e7fa-dd14-49db-8f6a-d4db01a42a9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.301876] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1721.301876] env[62820]: value = "task-1696151" [ 1721.301876] env[62820]: _type = "Task" [ 1721.301876] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.315745] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696151, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.349186] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696150, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.394853] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.398574] env[62820]: DEBUG nova.network.neutron [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance_info_cache with network_info: [{"id": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "address": "fa:16:3e:a8:e9:1b", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8ab641-c9", "ovs_interfaceid": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.443559] env[62820]: DEBUG nova.scheduler.client.report [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1721.617066] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Completed reading data from the image iterator. {{(pid=62820) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1721.617066] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f32488-9015-1be3-eeef-31842c87855f/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1721.617066] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448248af-2ca4-48cb-a631-259ee7a13a1b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.624930] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f32488-9015-1be3-eeef-31842c87855f/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1721.625252] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f32488-9015-1be3-eeef-31842c87855f/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1721.625573] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2c065796-ee6e-41fd-aff0-a4c271915bab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.813988] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696151, 'name': PowerOffVM_Task, 'duration_secs': 0.266126} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.813988] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1721.814918] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b9489c-5883-4e52-920f-f28a0f159147 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.836338] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d7ee32-75dd-4182-9caf-261396063aae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.844639] env[62820]: DEBUG oslo_vmware.rw_handles [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f32488-9015-1be3-eeef-31842c87855f/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1721.844931] env[62820]: INFO nova.virt.vmwareapi.images [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Downloaded image file data 1ad372de-b4a3-441d-b9c8-61354d703fed [ 1721.848341] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7d3710-dae5-41e4-95a5-5d9ef514f685 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.858383] env[62820]: DEBUG oslo_vmware.api [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696150, 'name': RemoveSnapshot_Task, 'duration_secs': 0.700424} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.858383] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1721.858383] env[62820]: INFO nova.compute.manager [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 17.12 seconds to snapshot the instance on the hypervisor. 
[ 1721.872912] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11c44652-66e9-4777-88a6-4e2180081079 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.902015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.906110] env[62820]: INFO nova.virt.vmwareapi.images [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] The imported VM was unregistered [ 1721.908703] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Caching image {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1721.908942] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Creating directory with path [datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1721.909727] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48791017-a310-453d-8ae8-2bf0256219cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.922060] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Created directory with path [datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1721.922271] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c/OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c.vmdk to [datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk. {{(pid=62820) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1721.922526] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-91a444cc-7d98-4864-b797-17a463b7f6d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.932108] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1721.932108] env[62820]: value = "task-1696153" [ 1721.932108] env[62820]: _type = "Task" [ 1721.932108] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.942215] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696153, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.949156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.951773] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.493s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.951845] env[62820]: DEBUG nova.objects.instance [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'resources' on Instance uuid 11843b38-3ce4-42a7-b855-a9d0b473e796 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.988764] env[62820]: INFO nova.scheduler.client.report [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocations for instance 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad [ 1722.361722] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1722.361722] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-94bf8bea-a8e0-4fc4-a80a-92e0df291fe1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.378435] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1722.378435] env[62820]: value = "task-1696154" [ 1722.378435] env[62820]: _type = "Task" [ 1722.378435] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.399974] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696154, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.442525] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9f674e-79b8-4234-81c1-42f8cfbcc859 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.458091] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696153, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.484249] env[62820]: DEBUG nova.compute.manager [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Found 3 images (rotation: 2) {{(pid=62820) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4898}} [ 1722.484249] env[62820]: DEBUG nova.compute.manager [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Rotating out 1 backups {{(pid=62820) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4906}} [ 1722.484320] env[62820]: DEBUG nova.compute.manager [None req-dfd7b63e-2771-40e1-b9aa-c48057bf1abf tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleting image b5637a72-690d-4ce3-99c3-dc6f93341a35 {{(pid=62820) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4911}} [ 1722.490017] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede94448-7476-4ab3-a73f-c3ec474b0f55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.499024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab973a4c-4046-41eb-9748-413daeabd9d4 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.597s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.504871] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1722.745759] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73af57a5-4675-45e7-a3a7-16d27561f3f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.757501] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961817fa-faaa-4742-bf6e-1114a67f83cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.792081] 
env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529d100c-cf49-4665-b3f0-e6537abea640 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.803773] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c09be7-9cad-48d9-9738-b80e084b1330 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.824537] env[62820]: DEBUG nova.compute.provider_tree [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1722.892213] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696154, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.946518] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696153, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.011661] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1723.012007] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35e8db98-8082-406d-901a-63fdc8aaddf3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.024510] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1723.024510] env[62820]: value = "task-1696155" [ 1723.024510] env[62820]: _type = "Task" [ 1723.024510] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.036932] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696155, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.348805] env[62820]: ERROR nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [req-837925bd-12a9-4945-8988-f02f2ade5a94] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-837925bd-12a9-4945-8988-f02f2ade5a94"}]} [ 1723.365040] env[62820]: DEBUG nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1723.382026] env[62820]: DEBUG nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1723.382026] env[62820]: DEBUG nova.compute.provider_tree [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1723.397070] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696154, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.398170] env[62820]: DEBUG nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1723.408480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4323e7df-136f-4bbe-8160-fd7b2579727e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.408757] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.432407] env[62820]: DEBUG nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1723.447216] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696153, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.541062] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696155, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.696431] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd271e6e-2608-48f5-bd53-b8ede1853bab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.707067] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57be032-b39d-4c01-b589-285ce5539573 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.743381] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2c92af-9e05-47d0-bac0-742d1ba4615a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.755118] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9575306e-4201-434c-bf3c-4fdc79c2919c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.773591] env[62820]: DEBUG nova.compute.provider_tree [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1723.898844] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696154, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.914279] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1723.962189] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696153, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.047300] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696155, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.308726] env[62820]: DEBUG nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 128 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1724.309016] env[62820]: DEBUG nova.compute.provider_tree [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 128 to 129 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1724.309216] env[62820]: DEBUG nova.compute.provider_tree [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1724.396063] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696154, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.441013] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.447259] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696153, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.480501} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.447451] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c/OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c.vmdk to [datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk. [ 1724.447641] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Cleaning up location [datastore1] OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1724.447809] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_f7ee9734-28a5-4fd5-a1fc-b67ba571317c {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1724.448462] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f92fbd2-84fc-4d04-92e6-711e2393d339 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.456935] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1724.456935] env[62820]: value = "task-1696156" [ 1724.456935] env[62820]: _type = "Task" [ 1724.456935] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.465172] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696156, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.535962] env[62820]: DEBUG oslo_vmware.api [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696155, 'name': PowerOnVM_Task, 'duration_secs': 1.222206} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.536354] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1724.536591] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-121ee1b2-a8f3-4de1-8f4f-d6739750433d tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance '860637a2-8c59-42af-a9f5-4e80c5466274' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1724.814721] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.863s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.817453] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.526s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.818429] env[62820]: DEBUG nova.objects.instance [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lazy-loading 'resources' on Instance uuid 7a755ef6-67bc-4242-9343-c54c8566adf8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.850182] env[62820]: INFO nova.scheduler.client.report [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted allocations for instance 11843b38-3ce4-42a7-b855-a9d0b473e796 [ 1724.899788] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696154, 'name': CreateSnapshot_Task, 'duration_secs': 2.329555} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.900293] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1724.901138] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f0f4be-9b79-4f60-9727-0618dc4d58b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.967814] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.05097} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.968095] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1724.968325] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.968560] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk to [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1724.968812] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d3cb396-4d5d-481a-aec1-692c669190b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.976597] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1724.976597] env[62820]: value = "task-1696157" [ 1724.976597] env[62820]: _type = "Task" [ 1724.976597] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.984417] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696157, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.246833] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.247158] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.247356] env[62820]: DEBUG nova.compute.manager [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1725.248383] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8447824-a052-48cb-91bf-2595cecad68e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.257354] env[62820]: DEBUG nova.compute.manager [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1725.258031] env[62820]: DEBUG nova.objects.instance [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'flavor' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1725.358966] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c56ed83-3bb1-4b51-852a-28c91932dfa3 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "11843b38-3ce4-42a7-b855-a9d0b473e796" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.421614] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1725.422598] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4107424d-77dc-4ec7-9b52-907f38822e6c {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.437589] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1725.437589] env[62820]: value = "task-1696158" [ 1725.437589] env[62820]: _type = "Task" [ 1725.437589] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.449624] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.492080] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696157, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.563226] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a1c579-c81f-4a6a-abef-6a56fbf718e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.574085] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0a4da2-080a-41b4-b680-5b0882d58cf0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.616073] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc14750-f3ed-4003-89bd-4f1caa5a8e62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.625101] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8592de64-af0b-464a-9773-af47b3f82ec6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.642461] env[62820]: DEBUG nova.compute.provider_tree [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1725.953245] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 93%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.991911] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696157, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.145804] env[62820]: DEBUG nova.scheduler.client.report [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1726.267410] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1726.267787] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e0128ac-c0ca-48d2-8293-c5ace569a9d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.278011] env[62820]: DEBUG oslo_vmware.api [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1726.278011] env[62820]: value = "task-1696159" [ 1726.278011] env[62820]: _type = "Task" [ 1726.278011] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.292653] env[62820]: DEBUG oslo_vmware.api [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.453269] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.472554] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.473166] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.473288] env[62820]: INFO nova.compute.manager [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Rebooting instance [ 1726.492975] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696157, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.651650] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.834s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.655431] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.261s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.658632] env[62820]: INFO nova.compute.claims [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1726.684995] env[62820]: INFO nova.scheduler.client.report [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Deleted allocations for instance 7a755ef6-67bc-4242-9343-c54c8566adf8 [ 1726.790933] env[62820]: DEBUG oslo_vmware.api [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696159, 'name': PowerOffVM_Task, 'duration_secs': 0.489522} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.791249] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1726.791453] env[62820]: DEBUG nova.compute.manager [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1726.792735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadac776-07bd-43c8-8caa-34aa9aeaf34c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.939374] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "860637a2-8c59-42af-a9f5-4e80c5466274" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.939374] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.939374] env[62820]: DEBUG nova.compute.manager [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Going to confirm migration 3 {{(pid=62820) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5127}} [ 1726.952266] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.991342] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696157, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.993370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.993550] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.993728] env[62820]: DEBUG nova.network.neutron [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1727.199494] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f8af41c1-dc0a-4872-9a83-ae9bd8b4699a tempest-ServersTestManualDisk-1867588506 tempest-ServersTestManualDisk-1867588506-project-member] Lock "7a755ef6-67bc-4242-9343-c54c8566adf8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.591s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.311886] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ed6f67b3-3784-4530-86ef-f8d94972a085 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.455011] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.500027] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696157, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.510703} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.500631] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1ad372de-b4a3-441d-b9c8-61354d703fed/1ad372de-b4a3-441d-b9c8-61354d703fed.vmdk to [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1727.502032] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc57839-9fa1-418f-a689-737707d50589 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.531045] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1727.532406] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.533027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.533027] env[62820]: DEBUG nova.network.neutron [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1727.533027] env[62820]: DEBUG nova.objects.instance [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'info_cache' on Instance uuid 860637a2-8c59-42af-a9f5-4e80c5466274 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1727.534200] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cace5e9c-1b45-4641-a4d3-41e8a7ba17d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.562950] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1727.562950] env[62820]: value = "task-1696160" [ 1727.562950] env[62820]: _type = "Task" [ 1727.562950] env[62820]: } 
to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.576286] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.608675] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.608909] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.915017] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1fda08-4fc8-4b2a-8a89-e1d591ba0b1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.923370] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a55da37-208a-4422-bb72-2c2c157d5aeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.955336] env[62820]: DEBUG nova.network.neutron [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.960873] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de186c3-94c2-4cbb-9eee-2d6561925b6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.980892] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 95%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.983999] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a0dd66-32b4-4b0a-98d7-98cfa03b77b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.999543] env[62820]: DEBUG nova.compute.provider_tree [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1728.074790] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696160, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.111623] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1728.458673] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.466287] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.523153] env[62820]: ERROR nova.scheduler.client.report [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [req-5ce4bffa-6013-4ad1-ad79-0b68fa4a1dbc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5ce4bffa-6013-4ad1-ad79-0b68fa4a1dbc"}]} [ 1728.542516] env[62820]: DEBUG nova.scheduler.client.report [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1728.563829] env[62820]: DEBUG nova.scheduler.client.report [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1728.564236] env[62820]: DEBUG nova.compute.provider_tree [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1728.579970] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696160, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.581679] env[62820]: DEBUG nova.scheduler.client.report [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1728.601443] env[62820]: DEBUG nova.compute.manager [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Stashing vm_state: stopped {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1728.605512] env[62820]: DEBUG nova.scheduler.client.report [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1728.633562] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.891022] env[62820]: DEBUG nova.network.neutron [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance_info_cache with network_info: [{"id": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "address": "fa:16:3e:a8:e9:1b", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8ab641-c9", "ovs_interfaceid": "2e8ab641-c961-452e-a6eb-d760374ac2b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.893879] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce903164-290f-4d31-8797-46db8a7a49bb {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.902295] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f17b12-d9e9-48ee-8140-fd957e9d8337 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.936648] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a734899d-2589-458c-a4ce-c71fa58d8317 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.944995] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6ac7f0-b606-4e7b-9f44-d3ba3057bc15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.962960] env[62820]: DEBUG nova.compute.provider_tree [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1728.967314] env[62820]: DEBUG nova.compute.manager [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1728.968323] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3761d8e-6f4d-49e6-ac26-e093ef1dc163 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.981175] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696158, 'name': CloneVM_Task, 'duration_secs': 3.141326} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.981949] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Created linked-clone VM from snapshot [ 1728.982401] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c0eeb3-6a8f-42ec-a532-063807ba0f1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.991377] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Uploading image 9a38379f-96c5-43f3-94c6-97b0f5ad58c1 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1729.028823] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1729.028823] env[62820]: value = "vm-353643" [ 1729.028823] env[62820]: _type = "VirtualMachine" [ 1729.028823] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1729.029452] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5ed29316-290d-4772-862f-3d54b8f6399e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.037947] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease: (returnval){ [ 1729.037947] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5232ca73-aa2d-afcf-6dfa-40c64a292327" [ 1729.037947] env[62820]: _type = "HttpNfcLease" [ 1729.037947] env[62820]: } obtained for exporting VM: (result){ [ 1729.037947] env[62820]: value = "vm-353643" [ 1729.037947] env[62820]: _type = "VirtualMachine" [ 1729.037947] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1729.038217] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the lease: (returnval){ [ 1729.038217] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5232ca73-aa2d-afcf-6dfa-40c64a292327" [ 1729.038217] env[62820]: _type = "HttpNfcLease" [ 1729.038217] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1729.045522] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1729.045522] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5232ca73-aa2d-afcf-6dfa-40c64a292327" [ 1729.045522] env[62820]: _type = "HttpNfcLease" [ 1729.045522] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1729.077341] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696160, 'name': ReconfigVM_Task, 'duration_secs': 1.308252} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.077638] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Reconfigured VM instance instance-00000048 to attach disk [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690/e420644c-cfcc-4f8c-ae03-c9ebef585690.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1729.078334] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54d45eef-a4e4-46ae-9c99-90d88a017fd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.086429] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1729.086429] env[62820]: value = "task-1696162" [ 1729.086429] env[62820]: _type = "Task" [ 1729.086429] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.094800] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696162, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.123913] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.404363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-860637a2-8c59-42af-a9f5-4e80c5466274" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.404363] env[62820]: DEBUG nova.objects.instance [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'migration_context' on Instance uuid 860637a2-8c59-42af-a9f5-4e80c5466274 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1729.510359] env[62820]: DEBUG nova.scheduler.client.report [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 132 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1729.510359] env[62820]: DEBUG nova.compute.provider_tree [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 132 to 133 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1729.510359] env[62820]: DEBUG nova.compute.provider_tree [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1729.548218] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1729.548218] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5232ca73-aa2d-afcf-6dfa-40c64a292327" [ 1729.548218] env[62820]: _type = "HttpNfcLease" [ 1729.548218] env[62820]: } is ready. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1729.548218] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1729.548218] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5232ca73-aa2d-afcf-6dfa-40c64a292327" [ 1729.548218] env[62820]: _type = "HttpNfcLease" [ 1729.548218] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1729.548218] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae89d8a0-a5d6-4f23-bd0e-8034325f28e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.556222] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bbc374-395e-4f9b-420c-ecd81adf6676/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1729.556436] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bbc374-395e-4f9b-420c-ecd81adf6676/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1729.633212] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696162, 'name': Rename_Task, 'duration_secs': 0.162125} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.633212] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1729.633212] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd380ee9-18bb-44ba-80c5-5a5be6169816 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.639811] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1729.639811] env[62820]: value = "task-1696163" [ 1729.639811] env[62820]: _type = "Task" [ 1729.639811] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.648538] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.653044] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3d9652cf-63e1-483d-8e4a-f6350c460a46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.906493] env[62820]: DEBUG nova.objects.base [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Object Instance<860637a2-8c59-42af-a9f5-4e80c5466274> lazy-loaded attributes: info_cache,migration_context {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1729.907510] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78b18a7-effe-479a-824c-ef8797c70ab9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.928724] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b532af6-4e3c-4138-b479-c358969d5aa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.936249] env[62820]: DEBUG oslo_vmware.api [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1729.936249] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f9a4f9-fee8-2a0a-fa53-c30ebd53e981" [ 1729.936249] env[62820]: _type = "Task" [ 1729.936249] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.945728] env[62820]: DEBUG oslo_vmware.api [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f9a4f9-fee8-2a0a-fa53-c30ebd53e981, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.991328] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71fa4e8-743f-4e40-b0e2-9b678cef7ae6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.000496] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Doing hard reboot of VM {{(pid=62820) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1730.000787] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b7a9f900-f6fd-4dd4-bb9d-cdae1a8bb40f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.008932] env[62820]: DEBUG oslo_vmware.api [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1730.008932] env[62820]: value = "task-1696164" [ 1730.008932] env[62820]: _type = "Task" [ 1730.008932] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.019986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.365s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.020743] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1730.023937] env[62820]: DEBUG oslo_vmware.api [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696164, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.024661] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.584s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.026365] env[62820]: INFO nova.compute.claims [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1730.154610] env[62820]: DEBUG oslo_vmware.api [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696163, 'name': PowerOnVM_Task, 'duration_secs': 0.47657} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.155163] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1730.282200] env[62820]: DEBUG nova.compute.manager [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1730.282717] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be78eaa1-381c-4843-8291-733b9befa297 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.449737] env[62820]: DEBUG oslo_vmware.api [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f9a4f9-fee8-2a0a-fa53-c30ebd53e981, 'name': SearchDatastore_Task, 'duration_secs': 0.009325} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.450099] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.519404] env[62820]: DEBUG oslo_vmware.api [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696164, 'name': ResetVM_Task, 'duration_secs': 0.110097} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.519798] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Did hard reboot of VM {{(pid=62820) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1730.520073] env[62820]: DEBUG nova.compute.manager [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1730.521050] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08df8d77-892b-4ecf-9130-3a6f7b54c435 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.526507] env[62820]: DEBUG nova.compute.utils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1730.529554] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1730.530103] env[62820]: DEBUG nova.network.neutron [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1730.574734] env[62820]: DEBUG nova.policy [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1730.805472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b20c59fb-49fb-41a4-a73e-23a9c338f2dc tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.311s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.034854] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1731.046021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2dcc580c-67ad-4b1a-9cc2-e7ba8ada6457 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.571s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.129776] env[62820]: DEBUG nova.network.neutron [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Successfully created port: d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1731.350551] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc84df78-ecd5-4dee-831b-2e22cbe3b65b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.360015] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76da406c-fc05-4bb5-a3fc-f742b1dfc1ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.392959] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43addba6-6365-46dc-b80c-639ef4ef83f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.402902] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa761b6c-3bf8-4e1d-b933-a94cf520740a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.419384] env[62820]: DEBUG nova.compute.provider_tree [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.923359] env[62820]: DEBUG nova.scheduler.client.report [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1732.046553] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1732.072958] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1732.073232] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1732.073391] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1732.073575] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1732.073721] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1732.073873] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1732.074087] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1732.074261] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1732.074476] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1732.074655] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1732.075427] env[62820]: DEBUG nova.virt.hardware [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1732.075720] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c76e597-1f25-45cd-b82b-2739204f7a9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.085064] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c97bee8-552a-4bd1-83ac-de3bed96eef8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.428983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.429672] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1732.432347] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.799s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.433961] env[62820]: INFO nova.compute.claims [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1732.656904] env[62820]: DEBUG nova.compute.manager [req-e2580e88-4e76-4eaa-9f2b-800c12cc02d1 req-20891777-6c8e-49fa-b897-b4a507ab0653 service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Received event network-vif-plugged-d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1732.657184] env[62820]: DEBUG oslo_concurrency.lockutils [req-e2580e88-4e76-4eaa-9f2b-800c12cc02d1 req-20891777-6c8e-49fa-b897-b4a507ab0653 service nova] Acquiring lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.657448] env[62820]: DEBUG oslo_concurrency.lockutils [req-e2580e88-4e76-4eaa-9f2b-800c12cc02d1 req-20891777-6c8e-49fa-b897-b4a507ab0653 service nova] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.657691] env[62820]: DEBUG oslo_concurrency.lockutils [req-e2580e88-4e76-4eaa-9f2b-800c12cc02d1 req-20891777-6c8e-49fa-b897-b4a507ab0653 service nova] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.657965] env[62820]: DEBUG nova.compute.manager [req-e2580e88-4e76-4eaa-9f2b-800c12cc02d1 req-20891777-6c8e-49fa-b897-b4a507ab0653 service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] No waiting events found dispatching network-vif-plugged-d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1732.658163] env[62820]: WARNING nova.compute.manager [req-e2580e88-4e76-4eaa-9f2b-800c12cc02d1 req-20891777-6c8e-49fa-b897-b4a507ab0653 service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Received unexpected event network-vif-plugged-d7eba315-055d-4512-aa37-ec25ee38d9d0 for instance with vm_state building and task_state spawning. 
[ 1732.754244] env[62820]: DEBUG nova.network.neutron [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Successfully updated port: d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1732.773881] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed309bb-3028-4350-936d-84c852674f5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.782296] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Suspending the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1732.782551] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b8a81212-41ae-43af-92df-71b6722cb1bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.791469] env[62820]: DEBUG oslo_vmware.api [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1732.791469] env[62820]: value = "task-1696165" [ 1732.791469] env[62820]: _type = "Task" [ 1732.791469] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.801326] env[62820]: DEBUG oslo_vmware.api [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696165, 'name': SuspendVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.938909] env[62820]: DEBUG nova.compute.utils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1732.942080] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1732.942274] env[62820]: DEBUG nova.network.neutron [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1733.011515] env[62820]: DEBUG nova.policy [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81a169ac4144a5bbc0a4e3a077cb4a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65abf73e789b48d3ba24e2660d7c0341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1733.256770] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-82379c63-8dce-4b61-afb9-9b6a5ff605b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.257288] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-82379c63-8dce-4b61-afb9-9b6a5ff605b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.257288] env[62820]: DEBUG nova.network.neutron [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1733.304240] env[62820]: DEBUG oslo_vmware.api [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696165, 'name': SuspendVM_Task} progress is 58%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.336508] env[62820]: DEBUG nova.network.neutron [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Successfully created port: 8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1733.443031] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1733.703339] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e13603b-b4fc-4c0d-beab-5229ee34e3b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.713127] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45525dd3-7afc-42a0-af9a-afceaffa30d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.744023] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d009d4-3e57-43c5-955f-0581e5715f16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.752592] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181eb47a-897d-4dd6-84d6-974dc7fe358d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.768611] env[62820]: DEBUG nova.compute.provider_tree [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.799374] env[62820]: DEBUG nova.network.neutron [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1733.804604] env[62820]: DEBUG oslo_vmware.api [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696165, 'name': SuspendVM_Task, 'duration_secs': 0.901702} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.804925] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Suspended the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1733.805131] env[62820]: DEBUG nova.compute.manager [None req-f3f1af7a-64a6-4a47-ab4d-73ba42a337bd tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1733.805895] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e2298a-75c5-45dc-9a54-8d7cab460fee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.965602] env[62820]: DEBUG nova.network.neutron [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Updating instance_info_cache with network_info: [{"id": "d7eba315-055d-4512-aa37-ec25ee38d9d0", "address": "fa:16:3e:4e:52:bb", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7eba315-05", "ovs_interfaceid": "d7eba315-055d-4512-aa37-ec25ee38d9d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.272137] env[62820]: DEBUG nova.scheduler.client.report [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1734.459023] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1734.468151] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-82379c63-8dce-4b61-afb9-9b6a5ff605b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.468756] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Instance network_info: |[{"id": "d7eba315-055d-4512-aa37-ec25ee38d9d0", "address": "fa:16:3e:4e:52:bb", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7eba315-05", "ovs_interfaceid": "d7eba315-055d-4512-aa37-ec25ee38d9d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1734.468962] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:52:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd7eba315-055d-4512-aa37-ec25ee38d9d0', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1734.476695] env[62820]: DEBUG oslo.service.loopingcall [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.476914] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1734.479441] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43ed163b-9d2b-4ce1-9d82-6b81018fb905 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.499214] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1734.499479] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1734.499687] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1734.499894] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1734.500058] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1734.500211] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1734.500416] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1734.500572] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1734.500738] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1734.500898] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1734.501124] env[62820]: DEBUG nova.virt.hardware [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1734.501964] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169cae03-1880-4ba3-a99f-dad398069e5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.505746] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1734.505746] env[62820]: value = "task-1696166" [ 1734.505746] env[62820]: _type = "Task" [ 1734.505746] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.512662] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42bf086-1c34-4bde-9bb1-5b1a4851474f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.519916] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696166, 'name': CreateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.724245] env[62820]: DEBUG nova.compute.manager [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Received event network-changed-d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1734.724470] env[62820]: DEBUG nova.compute.manager [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Refreshing instance network info cache due to event network-changed-d7eba315-055d-4512-aa37-ec25ee38d9d0. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1734.724706] env[62820]: DEBUG oslo_concurrency.lockutils [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] Acquiring lock "refresh_cache-82379c63-8dce-4b61-afb9-9b6a5ff605b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.724868] env[62820]: DEBUG oslo_concurrency.lockutils [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] Acquired lock "refresh_cache-82379c63-8dce-4b61-afb9-9b6a5ff605b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.725058] env[62820]: DEBUG nova.network.neutron [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Refreshing network info cache for port d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1734.777220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.777743] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1734.780813] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.657s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.017197] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696166, 'name': CreateVM_Task, 'duration_secs': 0.46043} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.017586] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1735.018129] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.018328] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.018683] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1735.018970] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3061841-28fe-40af-a915-add275f2955d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.024167] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1735.024167] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ecebc5-ce72-cdf8-1e69-d62e5be626e8" [ 1735.024167] env[62820]: _type = "Task" [ 1735.024167] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.032599] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ecebc5-ce72-cdf8-1e69-d62e5be626e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.059584] env[62820]: DEBUG nova.network.neutron [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Successfully updated port: 8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1735.207000] env[62820]: INFO nova.compute.manager [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Resuming [ 1735.207635] env[62820]: DEBUG nova.objects.instance [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'flavor' on Instance uuid e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1735.284208] env[62820]: DEBUG nova.compute.utils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1735.288049] env[62820]: INFO nova.compute.claims [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1735.291946] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1735.292081] env[62820]: DEBUG nova.network.neutron [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1735.350278] env[62820]: DEBUG nova.policy [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1735.505860] env[62820]: DEBUG nova.network.neutron [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Updated VIF entry in instance network info cache for port d7eba315-055d-4512-aa37-ec25ee38d9d0. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1735.507812] env[62820]: DEBUG nova.network.neutron [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Updating instance_info_cache with network_info: [{"id": "d7eba315-055d-4512-aa37-ec25ee38d9d0", "address": "fa:16:3e:4e:52:bb", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7eba315-05", "ovs_interfaceid": "d7eba315-055d-4512-aa37-ec25ee38d9d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.537142] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ecebc5-ce72-cdf8-1e69-d62e5be626e8, 'name': SearchDatastore_Task, 'duration_secs': 0.0139} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.537420] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.537660] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1735.537891] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.538072] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.538246] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1735.538611] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17cfe51b-1646-4db9-9a19-7737f435c479 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.548560] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1735.548747] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1735.549540] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf29d326-c488-4547-8a51-11c5a7356f0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.556455] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1735.556455] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523079b8-574a-b5b9-8e99-8e923304513a" [ 1735.556455] env[62820]: _type = "Task" [ 1735.556455] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.562496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.562549] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.562725] env[62820]: DEBUG nova.network.neutron [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1735.571046] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523079b8-574a-b5b9-8e99-8e923304513a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.641298] env[62820]: DEBUG nova.network.neutron [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Successfully created port: 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1735.792537] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1735.796353] env[62820]: INFO nova.compute.resource_tracker [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating resource usage from migration cf85995d-0f02-4aa1-b5bb-a279754babe4 [ 1736.009340] env[62820]: DEBUG oslo_concurrency.lockutils [req-816f8de4-6ba3-4997-bf24-ee1484a6c8bf req-795259cf-175d-4ee9-abbb-a1ebd0cbb07e service nova] Releasing lock "refresh_cache-82379c63-8dce-4b61-afb9-9b6a5ff605b5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.053739] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3727fc92-bd98-4c65-9e3b-a7672f792fa9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.063280] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52dd2a9-7128-4c54-bd5e-ace99fa18720 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.071211] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523079b8-574a-b5b9-8e99-8e923304513a, 'name': SearchDatastore_Task, 'duration_secs': 0.014542} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.074758] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-676e10e9-998f-4783-a139-40bfafca891e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.105694] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b66e30-a40f-4c2a-86fc-92347f9d6283 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.109615] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1736.109615] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5277a5ec-80ad-df47-a420-1125d6ad8834" [ 1736.109615] env[62820]: _type = "Task" [ 1736.109615] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.116608] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7c5de9-d324-4ffe-8e12-cfc25971e90c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.123496] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5277a5ec-80ad-df47-a420-1125d6ad8834, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.133758] env[62820]: DEBUG nova.compute.provider_tree [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1736.135779] env[62820]: DEBUG nova.network.neutron [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.279043] env[62820]: DEBUG nova.network.neutron [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [{"id": "8cea8850-c5a0-4831-99cc-8920c44710b7", "address": "fa:16:3e:a4:96:78", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cea8850-c5", "ovs_interfaceid": "8cea8850-c5a0-4831-99cc-8920c44710b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.622497] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5277a5ec-80ad-df47-a420-1125d6ad8834, 'name': SearchDatastore_Task, 'duration_secs': 0.014284} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.622791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.623059] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 82379c63-8dce-4b61-afb9-9b6a5ff605b5/82379c63-8dce-4b61-afb9-9b6a5ff605b5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1736.623338] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c25b0f2b-658a-455d-a488-b348be610090 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.632300] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1736.632300] env[62820]: value = "task-1696167" [ 1736.632300] env[62820]: _type = "Task" [ 1736.632300] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.639133] env[62820]: DEBUG nova.scheduler.client.report [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1736.645772] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696167, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.719015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.719434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquired lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.719764] env[62820]: DEBUG nova.network.neutron [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.751375] env[62820]: DEBUG nova.compute.manager [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Received event network-vif-plugged-8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1736.751448] env[62820]: DEBUG oslo_concurrency.lockutils [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] Acquiring lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.751670] env[62820]: DEBUG oslo_concurrency.lockutils [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.751849] env[62820]: DEBUG oslo_concurrency.lockutils [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.752102] env[62820]: DEBUG nova.compute.manager [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] No waiting events found dispatching network-vif-plugged-8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1736.752305] env[62820]: WARNING nova.compute.manager [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Received unexpected event network-vif-plugged-8cea8850-c5a0-4831-99cc-8920c44710b7 for instance with vm_state building and task_state spawning. 
[ 1736.752495] env[62820]: DEBUG nova.compute.manager [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Received event network-changed-8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1736.752666] env[62820]: DEBUG nova.compute.manager [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Refreshing instance network info cache due to event network-changed-8cea8850-c5a0-4831-99cc-8920c44710b7. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1736.752848] env[62820]: DEBUG oslo_concurrency.lockutils [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] Acquiring lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.781543] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.781872] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Instance network_info: |[{"id": "8cea8850-c5a0-4831-99cc-8920c44710b7", "address": "fa:16:3e:a4:96:78", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cea8850-c5", "ovs_interfaceid": "8cea8850-c5a0-4831-99cc-8920c44710b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1736.782256] env[62820]: DEBUG oslo_concurrency.lockutils [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] Acquired lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.782476] env[62820]: DEBUG nova.network.neutron [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Refreshing network info cache for 
port 8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.783871] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:96:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8cea8850-c5a0-4831-99cc-8920c44710b7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1736.792273] env[62820]: DEBUG oslo.service.loopingcall [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.793644] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1736.793934] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55b49988-2e69-449c-a0b5-6d294f241451 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.811028] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1736.820543] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1736.820543] env[62820]: value = "task-1696168" [ 1736.820543] env[62820]: _type = "Task" [ 1736.820543] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.831730] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696168, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.840743] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1736.841037] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1736.841219] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1736.841457] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1736.841621] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1736.841777] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1736.842000] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1736.842217] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1736.842411] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1736.842599] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1736.842826] env[62820]: DEBUG nova.virt.hardware [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1736.843745] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1ad056-652f-41f9-80cc-c9e5efb3bb8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.854065] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be265494-4747-430e-a794-c50e38e80b80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.926999] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.927404] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.150498] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.370s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.150848] env[62820]: INFO nova.compute.manager [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Migrating [ 1737.158313] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696167, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.161636] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.711s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.182321] env[62820]: DEBUG nova.compute.manager [req-6679e643-b7cf-4dce-8f07-697ff8dea13f req-ba04f290-32bb-4ecf-95ca-03133fae6cf5 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-vif-plugged-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1737.182554] env[62820]: DEBUG oslo_concurrency.lockutils [req-6679e643-b7cf-4dce-8f07-697ff8dea13f req-ba04f290-32bb-4ecf-95ca-03133fae6cf5 service nova] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.182761] env[62820]: DEBUG oslo_concurrency.lockutils [req-6679e643-b7cf-4dce-8f07-697ff8dea13f req-ba04f290-32bb-4ecf-95ca-03133fae6cf5 service nova] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.182977] env[62820]: DEBUG oslo_concurrency.lockutils [req-6679e643-b7cf-4dce-8f07-697ff8dea13f req-ba04f290-32bb-4ecf-95ca-03133fae6cf5 service nova] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.183243] env[62820]: DEBUG nova.compute.manager [req-6679e643-b7cf-4dce-8f07-697ff8dea13f req-ba04f290-32bb-4ecf-95ca-03133fae6cf5 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] No waiting events found dispatching network-vif-plugged-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1737.183486] env[62820]: WARNING nova.compute.manager [req-6679e643-b7cf-4dce-8f07-697ff8dea13f req-ba04f290-32bb-4ecf-95ca-03133fae6cf5 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received unexpected event network-vif-plugged-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 for instance with vm_state building and task_state spawning. [ 1737.304244] env[62820]: DEBUG nova.network.neutron [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Successfully updated port: 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1737.330447] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696168, 'name': CreateVM_Task, 'duration_secs': 0.469573} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.330612] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1737.333186] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.333456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.333690] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1737.334201] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d186190e-014e-4e36-9cdf-867a459f5eba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.339252] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1737.339252] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52123633-4ed7-8cdc-3afc-600d2d1a5cdc" [ 1737.339252] env[62820]: _type = "Task" [ 1737.339252] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.347522] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52123633-4ed7-8cdc-3afc-600d2d1a5cdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.380812] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bbc374-395e-4f9b-420c-ecd81adf6676/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1737.381748] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919b5c85-64c8-4aca-a96b-f17fd2aa40aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.388229] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bbc374-395e-4f9b-420c-ecd81adf6676/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1737.388416] env[62820]: ERROR oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bbc374-395e-4f9b-420c-ecd81adf6676/disk-0.vmdk due to incomplete transfer. [ 1737.388706] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-24d3f1ab-e103-4f7a-a030-66bcdcb45402 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.395928] env[62820]: DEBUG oslo_vmware.rw_handles [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bbc374-395e-4f9b-420c-ecd81adf6676/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1737.396135] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Uploaded image 9a38379f-96c5-43f3-94c6-97b0f5ad58c1 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1737.398501] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1737.398724] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-058275ae-6149-4c09-a9da-d1412839627f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.407011] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1737.407011] env[62820]: value = "task-1696169" [ 1737.407011] env[62820]: _type = "Task" [ 1737.407011] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.416122] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696169, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.439026] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.439026] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1737.439026] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 1737.551300] env[62820]: DEBUG nova.network.neutron [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [{"id": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "address": "fa:16:3e:ed:27:27", "network": {"id": "90e74a55-bce9-47b4-aaa0-8f288fc6a438", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-23421215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14788b1c55684c2fbd3c07bff18757f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c81ac6d-fc", "ovs_interfaceid": "8c81ac6d-fc1a-4519-81f6-1a3a523acee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.597164] env[62820]: DEBUG nova.network.neutron [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updated VIF entry in instance network info cache for port 8cea8850-c5a0-4831-99cc-8920c44710b7. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1737.598616] env[62820]: DEBUG nova.network.neutron [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [{"id": "8cea8850-c5a0-4831-99cc-8920c44710b7", "address": "fa:16:3e:a4:96:78", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cea8850-c5", "ovs_interfaceid": "8cea8850-c5a0-4831-99cc-8920c44710b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.647484] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637804} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.647806] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 82379c63-8dce-4b61-afb9-9b6a5ff605b5/82379c63-8dce-4b61-afb9-9b6a5ff605b5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1737.648065] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1737.648359] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fedddaa-ccee-45ec-8b4e-283640889621 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.657770] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1737.657770] env[62820]: value = "task-1696170" [ 1737.657770] env[62820]: _type = "Task" [ 1737.657770] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.667637] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.673983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.674390] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.674690] env[62820]: DEBUG nova.network.neutron [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1737.806415] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.806600] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.806948] env[62820]: DEBUG nova.network.neutron [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1737.850087] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52123633-4ed7-8cdc-3afc-600d2d1a5cdc, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.850711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.851017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1737.851193] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.851343] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.851519] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1737.853935] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b302b629-1be9-4a0c-bea4-9a7c1e8d6925 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.864313] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1737.864313] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1737.865094] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb94fc81-ed92-4371-97e7-b327fb3acf7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.872987] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1737.872987] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ab1c84-8168-91b6-33c8-1497a7dc963a" [ 1737.872987] env[62820]: _type = "Task" [ 1737.872987] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.881281] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ab1c84-8168-91b6-33c8-1497a7dc963a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.903611] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d34e38c-6647-4518-84ab-a08f26b0870a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.912874] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9463d42-3d44-48ae-ae50-163db31848fe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.918760] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696169, 'name': Destroy_Task, 'duration_secs': 0.363241} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.919346] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Destroyed the VM [ 1737.919598] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1737.919822] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-999441be-5fa9-408c-94d4-e7226b782486 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.948349] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Skipping network cache update for instance because it is Building. 
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1737.948485] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1737.948622] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Skipping network cache update for instance because it is Building. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10299}} [ 1737.953203] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e29fc11-c429-43ee-a154-74cf5f5cc953 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.955780] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1737.955780] env[62820]: value = "task-1696171" [ 1737.955780] env[62820]: _type = "Task" [ 1737.955780] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.964222] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0709ea76-1d43-4454-a12a-0d1a152cc58d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.970869] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696171, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.981253] env[62820]: DEBUG nova.compute.provider_tree [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1737.983198] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.983331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.983473] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1737.983625] env[62820]: DEBUG nova.objects.instance [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lazy-loading 'info_cache' on Instance uuid b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1738.054349] env[62820]: DEBUG oslo_concurrency.lockutils [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Releasing lock "refresh_cache-e420644c-cfcc-4f8c-ae03-c9ebef585690" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.055593] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6a80ed-1248-4660-bfd6-ddf4d2ba6c23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.063687] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Resuming the VM {{(pid=62820) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1738.063963] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dc6033b-3e8d-46bc-9083-320efc865232 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.071513] env[62820]: DEBUG oslo_vmware.api [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1738.071513] env[62820]: value = "task-1696172" [ 1738.071513] env[62820]: _type = "Task" [ 1738.071513] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.079927] env[62820]: DEBUG oslo_vmware.api [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.100963] env[62820]: DEBUG oslo_concurrency.lockutils [req-64055449-7524-409e-89ac-c41a59d77fbe req-7450e33f-7b62-4140-a002-182439731224 service nova] Releasing lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.167516] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066539} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.167807] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.168644] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0727ed0-01fc-493e-ab5f-b69e14365a0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.193982] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 82379c63-8dce-4b61-afb9-9b6a5ff605b5/82379c63-8dce-4b61-afb9-9b6a5ff605b5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.194328] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2d64ac6-6934-4a90-9c7f-7feec5db5c7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.216637] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1738.216637] env[62820]: value = "task-1696173" [ 1738.216637] env[62820]: _type = "Task" [ 1738.216637] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.226388] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696173, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.355864] env[62820]: DEBUG nova.network.neutron [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1738.385042] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ab1c84-8168-91b6-33c8-1497a7dc963a, 'name': SearchDatastore_Task, 'duration_secs': 0.008921} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.385852] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28d0394a-a0a1-45d3-9190-74c4ef682db9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.392589] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1738.392589] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52667272-3988-5623-188e-30b6809b2a1f" [ 1738.392589] env[62820]: _type = "Task" [ 1738.392589] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.401998] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52667272-3988-5623-188e-30b6809b2a1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.466335] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696171, 'name': RemoveSnapshot_Task, 'duration_secs': 0.412027} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.469272] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1738.469643] env[62820]: DEBUG nova.compute.manager [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1738.470515] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1b9d49-e077-4d98-8211-394772669486 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.486500] env[62820]: DEBUG nova.scheduler.client.report [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1738.581683] env[62820]: DEBUG oslo_vmware.api [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696172, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.582671] env[62820]: DEBUG nova.network.neutron [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.586246] env[62820]: DEBUG nova.network.neutron [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.730490] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696173, 'name': 
ReconfigVM_Task, 'duration_secs': 0.320525} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.730787] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 82379c63-8dce-4b61-afb9-9b6a5ff605b5/82379c63-8dce-4b61-afb9-9b6a5ff605b5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1738.731506] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3ee4131-3491-4abd-b0a1-4a1e5210f31d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.741542] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1738.741542] env[62820]: value = "task-1696174" [ 1738.741542] env[62820]: _type = "Task" [ 1738.741542] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.752478] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696174, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.909794] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52667272-3988-5623-188e-30b6809b2a1f, 'name': SearchDatastore_Task, 'duration_secs': 0.009238} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.912935] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.912935] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e/4323e7df-136f-4bbe-8160-fd7b2579727e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1738.912935] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7199e6ac-adc8-4cad-a39a-04a26ff79e39 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.926698] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1738.926698] env[62820]: value = "task-1696175" [ 1738.926698] env[62820]: _type = "Task" [ 1738.926698] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.938132] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696175, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.984815] env[62820]: INFO nova.compute.manager [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Shelve offloading [ 1739.084966] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.086858] env[62820]: DEBUG oslo_vmware.api [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696172, 'name': PowerOnVM_Task, 'duration_secs': 0.739783} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.087025] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Resumed the VM {{(pid=62820) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1739.087224] env[62820]: DEBUG nova.compute.manager [None req-05fe9457-2567-42a2-9b52-de2bf7ea2db8 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1739.088156] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c290f511-cba0-4cd2-bf1f-324f347ab1b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.091528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.091809] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Instance network_info: |[{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1739.092343] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:6d:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37f8bb7e-538f-426a-a4e3-1ae811cad8d3', 'vif_model': 
'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1739.102752] env[62820]: DEBUG oslo.service.loopingcall [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.104444] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1739.104761] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5af86bab-d25d-4a4c-b770-c4bf90203553 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.139436] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1739.139436] env[62820]: value = "task-1696176" [ 1739.139436] env[62820]: _type = "Task" [ 1739.139436] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.148515] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696176, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.220260] env[62820]: DEBUG nova.compute.manager [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1739.220546] env[62820]: DEBUG nova.compute.manager [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing instance network info cache due to event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1739.220687] env[62820]: DEBUG oslo_concurrency.lockutils [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.220839] env[62820]: DEBUG oslo_concurrency.lockutils [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.220994] env[62820]: DEBUG nova.network.neutron [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1739.257015] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696174, 'name': Rename_Task, 'duration_secs': 0.356767} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.257378] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1739.257663] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f77c37d-888c-438c-a9fe-970b3e96c5be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.266213] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1739.266213] env[62820]: value = "task-1696177" [ 1739.266213] env[62820]: _type = "Task" [ 1739.266213] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.276594] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.441500] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696175, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.488541] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1739.488871] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-609ea326-d6b4-4195-a9d0-70e059f0c368 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.501697] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.340s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.504851] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1739.504851] env[62820]: value = "task-1696178" [ 1739.504851] env[62820]: _type = "Task" [ 1739.504851] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.517903] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1739.518175] env[62820]: DEBUG nova.compute.manager [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1739.519035] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2207105f-d34c-4e0d-94c3-37da92179a68 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.526247] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.526528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.526744] env[62820]: DEBUG nova.network.neutron [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1739.651083] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696176, 'name': CreateVM_Task, 'duration_secs': 0.483565} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.651259] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1739.651957] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.652136] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.652574] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1739.653863] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f5f58a3-2106-4e83-bbd3-a83a81778369 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.660918] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1739.660918] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52831c8c-dcac-e848-7317-893348e03d06" [ 1739.660918] env[62820]: _type = "Task" [ 1739.660918] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.669758] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52831c8c-dcac-e848-7317-893348e03d06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.777443] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696177, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.903775] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.941044] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696175, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623803} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.941044] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e/4323e7df-136f-4bbe-8160-fd7b2579727e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1739.941044] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1739.941044] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-352cf906-0255-4c21-8ba1-ac11c541d757 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.949855] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1739.949855] env[62820]: value = "task-1696179" [ 1739.949855] env[62820]: _type = "Task" [ 1739.949855] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.960054] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696179, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.070886] env[62820]: INFO nova.scheduler.client.report [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocation for migration 88e49372-0e6b-4197-a1c5-095a6ee63d37 [ 1740.135184] env[62820]: DEBUG nova.network.neutron [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updated VIF entry in instance network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1740.135639] env[62820]: DEBUG nova.network.neutron [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.173266] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52831c8c-dcac-e848-7317-893348e03d06, 'name': SearchDatastore_Task, 'duration_secs': 0.023678} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.176938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.176938] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1740.176938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.176938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.176938] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.176938] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57745d29-cd2f-49cd-a3b0-ef6c812f3afa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.193823] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.194033] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1740.194741] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94f3af72-d91a-4acf-83a2-89af92a8218e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.200943] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1740.200943] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52014cf3-866f-bab8-09bb-3115776fbbd4" [ 1740.200943] env[62820]: _type = "Task" [ 1740.200943] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.210782] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52014cf3-866f-bab8-09bb-3115776fbbd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.281160] env[62820]: DEBUG oslo_vmware.api [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696177, 'name': PowerOnVM_Task, 'duration_secs': 0.658303} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.281502] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1740.281502] env[62820]: INFO nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Took 8.23 seconds to spawn the instance on the hypervisor. 
[ 1740.281651] env[62820]: DEBUG nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1740.282668] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e492ca6c-09e0-45ee-911d-a7786bdb4664 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.393323] env[62820]: DEBUG nova.network.neutron [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.408013] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.408239] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1740.408791] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.408968] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.409132] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None 
None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.409283] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.409513] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.409667] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.409799] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1740.409930] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.461111] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.208202} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.461111] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1740.461643] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b28b7a-1e02-4206-8f98-f2c9327210cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.484601] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e/4323e7df-136f-4bbe-8160-fd7b2579727e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.484916] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57621e77-6a19-4fe3-be16-f9ab90dd1be5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.508072] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1740.508072] env[62820]: value = "task-1696180" [ 1740.508072] env[62820]: _type = "Task" [ 1740.508072] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.516993] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696180, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.579897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-02ff62c9-7a12-41c6-98bd-41a112a01aee tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 13.642s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.601822] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1481b2d4-d46b-4e1a-a9d4-b6507d15bd36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.622432] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1740.639059] env[62820]: DEBUG oslo_concurrency.lockutils [req-a0fa5d84-88c5-45c4-90d8-429cb8ca5d54 req-6cf97607-d85a-47bf-90a6-ea85493e2b29 service nova] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.712900] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52014cf3-866f-bab8-09bb-3115776fbbd4, 'name': SearchDatastore_Task, 'duration_secs': 0.084142} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.713678] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54769699-975f-405e-a356-194476472216 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.719626] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1740.719626] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52a054bf-50f3-96f2-9f16-368246021f27" [ 1740.719626] env[62820]: _type = "Task" [ 1740.719626] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.727919] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a054bf-50f3-96f2-9f16-368246021f27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.799833] env[62820]: INFO nova.compute.manager [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Took 19.42 seconds to build instance. [ 1740.896372] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.913194] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.913572] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.913857] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.914084] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1740.914957] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d903ce0d-34bc-4e73-8380-d5880abe7cc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.923634] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe8b4f7-4605-4953-a3ef-52fa73f84931 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.942412] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792583ac-2605-41f3-ab0c-0fe5bb9a4d17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.950169] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e118b411-dc38-4d66-90cc-3b015de9a6e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.980998] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178888MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1740.981300] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.981376] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.018696] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696180, 'name': ReconfigVM_Task, 'duration_secs': 0.282632} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.019009] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e/4323e7df-136f-4bbe-8160-fd7b2579727e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1741.019664] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d7fe4cc-0ba4-4d19-b0fe-8501155ae458 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.029093] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1741.029093] env[62820]: value = "task-1696181" [ 1741.029093] env[62820]: _type = "Task" [ 1741.029093] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.045453] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696181, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.128412] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1741.128903] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b145cae-f09e-40f8-ad1c-52a889b0dfcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.137610] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1741.137610] env[62820]: value = "task-1696182" [ 1741.137610] env[62820]: _type = "Task" [ 1741.137610] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.148633] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696182, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.232127] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a054bf-50f3-96f2-9f16-368246021f27, 'name': SearchDatastore_Task, 'duration_secs': 0.010845} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.233433] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.233433] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9c0d9676-9db9-4be2-a8e6-84bd816234aa/9c0d9676-9db9-4be2-a8e6-84bd816234aa.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1741.233433] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-134106b6-b743-43fd-a0ca-47871368b942 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.243288] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1741.243288] env[62820]: value = "task-1696183" [ 1741.243288] env[62820]: _type = "Task" [ 1741.243288] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.253937] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696183, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.302435] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ef5a8dc-5448-45eb-8c2b-e95166c0f0bf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.937s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.369093] env[62820]: DEBUG nova.compute.manager [req-6d98604d-f50b-4749-be80-c9e07c231a1b req-593f7e99-ddcc-4cf0-b97a-0bea6c8fb56e service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-vif-unplugged-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1741.369339] env[62820]: DEBUG oslo_concurrency.lockutils [req-6d98604d-f50b-4749-be80-c9e07c231a1b req-593f7e99-ddcc-4cf0-b97a-0bea6c8fb56e service nova] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.369546] env[62820]: DEBUG oslo_concurrency.lockutils [req-6d98604d-f50b-4749-be80-c9e07c231a1b req-593f7e99-ddcc-4cf0-b97a-0bea6c8fb56e service nova] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.369710] env[62820]: DEBUG oslo_concurrency.lockutils [req-6d98604d-f50b-4749-be80-c9e07c231a1b req-593f7e99-ddcc-4cf0-b97a-0bea6c8fb56e service nova] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.369881] env[62820]: DEBUG nova.compute.manager [req-6d98604d-f50b-4749-be80-c9e07c231a1b req-593f7e99-ddcc-4cf0-b97a-0bea6c8fb56e service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] No waiting events found dispatching network-vif-unplugged-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1741.370254] env[62820]: WARNING nova.compute.manager [req-6d98604d-f50b-4749-be80-c9e07c231a1b req-593f7e99-ddcc-4cf0-b97a-0bea6c8fb56e service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received unexpected event network-vif-unplugged-89ab754d-6988-4b28-882b-5f352eda86ec for instance with vm_state shelved and task_state shelving_offloading. 
[ 1741.409805] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.410724] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55beee80-c5d7-4f5d-b790-be62ee90cd16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.419531] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1741.419804] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16403ca5-75eb-4874-939c-b71d7127c25f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.539155] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1741.539732] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1741.539732] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleting the datastore file [datastore1] a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1741.543964] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-727cb544-7cf8-4229-b436-1e1094177941 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.548925] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696181, 'name': Rename_Task, 'duration_secs': 0.158685} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.548925] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1741.548925] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f3d7184-3c62-4d18-911a-9031414e889d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.555206] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1741.555206] env[62820]: value = "task-1696185" [ 1741.555206] env[62820]: _type = "Task" [ 1741.555206] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.560448] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1741.560448] env[62820]: value = "task-1696186" [ 1741.560448] env[62820]: _type = "Task" [ 1741.560448] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.567526] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.574325] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696186, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.651375] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1741.651760] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1741.759551] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696183, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.932437] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.932742] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.933061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.933296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.933560] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.935782] env[62820]: INFO nova.compute.manager [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Terminating instance [ 1741.995815] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance 210277a2-dd10-4e08-8627-4b025a554410 as it has an incoming, in-progress migration cf85995d-0f02-4aa1-b5bb-a279754babe4. Migration status is migrating {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1741.997787] env[62820]: INFO nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating resource usage from migration cf85995d-0f02-4aa1-b5bb-a279754babe4 [ 1742.020217] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.020338] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance eafe98b7-a67d-4bab-bfc0-8367ae069d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.020467] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0dd0e112-7a7c-4b37-8938-bb98aab2d485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.020576] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8486f52-998d-4308-813a-9c651e2eb093 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.020692] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.020824] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3228cd34-2144-425a-aca6-400cb0991e43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.020929] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 6da857ea-f213-4b17-9e9f-d74d1ea649c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021065] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 03b0abc8-dd32-4cf9-8750-d64b8a66695e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021179] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 76bd4a09-300d-460e-8442-21b4f6567698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021287] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 860637a2-8c59-42af-a9f5-4e80c5466274 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021404] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance e420644c-cfcc-4f8c-ae03-c9ebef585690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021514] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 82379c63-8dce-4b61-afb9-9b6a5ff605b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021619] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4323e7df-136f-4bbe-8160-fd7b2579727e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021725] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9c0d9676-9db9-4be2-a8e6-84bd816234aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.021829] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Migration cf85995d-0f02-4aa1-b5bb-a279754babe4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1742.021933] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 210277a2-dd10-4e08-8627-4b025a554410 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.022164] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1742.022298] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1742.064552] env[62820]: DEBUG oslo_vmware.api [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.4979} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.070147] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1742.070357] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1742.070540] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1742.077774] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696186, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.088470] env[62820]: INFO nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted allocations for instance a8486f52-998d-4308-813a-9c651e2eb093 [ 1742.160777] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.161045] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.161285] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.161507] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.161681] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.161835] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.162058] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.162233] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.162383] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.162550] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.162723] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.168379] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-049b2c5c-5fa3-4f04-a562-7815ead7b615 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.190428] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1742.190428] env[62820]: value = "task-1696187" [ 1742.190428] env[62820]: _type = "Task" [ 1742.190428] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.204722] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696187, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.218407] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "860637a2-8c59-42af-a9f5-4e80c5466274" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.218702] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.218940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.219163] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.219382] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.221832] env[62820]: INFO nova.compute.manager [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Terminating instance [ 1742.256345] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576222} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.256650] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 9c0d9676-9db9-4be2-a8e6-84bd816234aa/9c0d9676-9db9-4be2-a8e6-84bd816234aa.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1742.256850] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1742.257120] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-504fa3fd-5fa2-479e-aff2-da37856d56c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.264832] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3653fe-0f2e-41d7-8491-40d260e76154 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.269811] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1742.269811] env[62820]: value = "task-1696188" [ 1742.269811] env[62820]: _type = "Task" [ 1742.269811] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.276672] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5937ff6d-9b1c-441b-819f-0709ba1a67b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.283258] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696188, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.311743] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa006031-ef9a-4767-9ff7-67597c60c3bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.321043] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302d9c29-ea6b-4643-a44c-8796f7610755 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.335523] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.439547] env[62820]: DEBUG nova.compute.manager [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1742.439811] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1742.440736] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03394032-ff22-4df2-8f02-5e509f1b0681 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.449605] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1742.449899] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84ef7727-c746-4b86-adda-07bf67377364 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.458391] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1742.458391] env[62820]: value = "task-1696189" [ 1742.458391] env[62820]: _type = "Task" [ 1742.458391] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.468443] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696189, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.576537] env[62820]: DEBUG oslo_vmware.api [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696186, 'name': PowerOnVM_Task, 'duration_secs': 0.592799} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.576906] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1742.577149] env[62820]: INFO nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Took 8.12 seconds to spawn the instance on the hypervisor. [ 1742.577333] env[62820]: DEBUG nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1742.578156] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d8a9d4-36d9-4a45-a60a-5a2e9e92e5db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.594931] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.701428] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696187, 'name': ReconfigVM_Task, 'duration_secs': 0.423306} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.702923] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1742.708716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "35b95400-6399-48ae-b7d5-420c33d653dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.708945] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.726569] env[62820]: DEBUG nova.compute.manager [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1742.726812] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1742.727932] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e488915c-2a73-4090-b4d9-66964e10641d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.736837] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1742.737111] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08f222e8-66a3-4015-84d8-09b1c8490e2d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.746026] env[62820]: DEBUG oslo_vmware.api [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1742.746026] env[62820]: value = "task-1696190" [ 1742.746026] env[62820]: _type = "Task" [ 1742.746026] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.757833] env[62820]: DEBUG oslo_vmware.api [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696190, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.780923] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203088} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.781230] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1742.782132] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cf67a6-4403-4691-803c-dc58ccdb1b83 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.810051] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 9c0d9676-9db9-4be2-a8e6-84bd816234aa/9c0d9676-9db9-4be2-a8e6-84bd816234aa.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1742.813981] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e657f7c6-b28d-44b8-a7b9-82b3fc5351a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.833404] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1742.833404] env[62820]: value = "task-1696191" [ 1742.833404] env[62820]: _type = "Task" [ 1742.833404] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.839175] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1742.847142] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.971160] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696189, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.101817] env[62820]: INFO nova.compute.manager [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Took 18.68 seconds to build instance. [ 1743.211777] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1743.212088] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1743.212253] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1743.212439] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1743.212587] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1743.212734] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1743.212945] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1743.213117] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1743.213286] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1743.213448] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1743.213620] env[62820]: DEBUG nova.virt.hardware [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1743.219375] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Reconfiguring VM instance instance-0000003c to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1743.219799] env[62820]: DEBUG nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1743.222296] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-744ceba3-07c9-4385-9fe2-1e09bd68aa15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.245132] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1743.245132] env[62820]: value = "task-1696192" [ 1743.245132] env[62820]: _type = "Task" [ 1743.245132] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.259593] env[62820]: DEBUG oslo_vmware.api [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696190, 'name': PowerOffVM_Task, 'duration_secs': 0.358668} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.263604] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1743.263828] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1743.264362] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696192, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.264575] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-029692ba-3f9d-4758-a6fd-f612783d4fbd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.346842] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696191, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.348713] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1743.348960] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.368s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.349204] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.755s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.349472] env[62820]: DEBUG nova.objects.instance [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'resources' on Instance uuid a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1743.470687] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696189, 'name': PowerOffVM_Task, 'duration_secs': 0.636879} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.470999] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1743.471424] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1743.471710] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebd59887-f574-4d15-ab4f-c936cc1fdb4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.553479] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1743.553717] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1743.553901] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] 860637a2-8c59-42af-a9f5-4e80c5466274 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1743.555060] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31c28d62-eed4-46af-9177-94dd09ec8cd6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.564943] env[62820]: DEBUG oslo_vmware.api [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1743.564943] env[62820]: value = "task-1696195" [ 1743.564943] env[62820]: _type = "Task" [ 1743.564943] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.575091] env[62820]: DEBUG oslo_vmware.api [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696195, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.600546] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1743.600546] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1743.600874] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] 82379c63-8dce-4b61-afb9-9b6a5ff605b5 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1743.601216] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88e69891-3f9d-435e-ad09-e3a4e37c47e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.605025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b644d44b-c8e3-4cc5-ae48-4cbeb2133d9f tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.195s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.610085] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1743.610085] env[62820]: value = "task-1696196" [ 1743.610085] env[62820]: _type = "Task" [ 1743.610085] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.618382] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.627612] env[62820]: DEBUG nova.compute.manager [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1743.627799] env[62820]: DEBUG nova.compute.manager [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing instance network info cache due to event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1743.628022] env[62820]: DEBUG oslo_concurrency.lockutils [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.628735] env[62820]: DEBUG oslo_concurrency.lockutils [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.628735] env[62820]: DEBUG nova.network.neutron [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1743.752871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.756506] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696192, 'name': ReconfigVM_Task, 'duration_secs': 0.315912} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.756779] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Reconfigured VM instance instance-0000003c to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1743.757582] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f575e7-05cc-457d-ba20-8b2fd9e37b7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.780131] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 210277a2-dd10-4e08-8627-4b025a554410/210277a2-dd10-4e08-8627-4b025a554410.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1743.780411] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-232ac1c4-02e9-472f-aeb3-dc2c05f41b56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.799371] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1743.799371] env[62820]: value = "task-1696197" [ 1743.799371] env[62820]: _type = "Task" [ 1743.799371] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.807675] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696197, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.845098] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696191, 'name': ReconfigVM_Task, 'duration_secs': 0.853359} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.845410] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 9c0d9676-9db9-4be2-a8e6-84bd816234aa/9c0d9676-9db9-4be2-a8e6-84bd816234aa.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.846154] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d82621a-06df-43a0-976e-44cbc87f81c4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.852795] env[62820]: DEBUG nova.objects.instance [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'numa_topology' on Instance uuid a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1743.855243] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1743.855243] env[62820]: value = "task-1696198" [ 1743.855243] env[62820]: _type = "Task" [ 1743.855243] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.866561] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696198, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.064164] env[62820]: DEBUG nova.compute.manager [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1744.077635] env[62820]: DEBUG oslo_vmware.api [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167322} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.078244] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1744.078244] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1744.078345] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1744.078715] env[62820]: INFO nova.compute.manager [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1744.078782] env[62820]: DEBUG oslo.service.loopingcall [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.078966] env[62820]: DEBUG nova.compute.manager [-] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1744.079070] env[62820]: DEBUG nova.network.neutron [-] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1744.122649] env[62820]: DEBUG oslo_vmware.api [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147903} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.122958] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1744.123119] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1744.123301] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1744.123470] env[62820]: INFO nova.compute.manager [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1744.123703] env[62820]: DEBUG oslo.service.loopingcall [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.124190] env[62820]: DEBUG nova.compute.manager [-] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1744.124190] env[62820]: DEBUG nova.network.neutron [-] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1744.311191] env[62820]: DEBUG oslo_vmware.api [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696197, 'name': ReconfigVM_Task, 'duration_secs': 0.385849} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.311529] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 210277a2-dd10-4e08-8627-4b025a554410/210277a2-dd10-4e08-8627-4b025a554410.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1744.311776] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1744.356537] env[62820]: DEBUG nova.objects.base [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1744.379342] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696198, 'name': Rename_Task, 'duration_secs': 0.166934} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.380717] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1744.381017] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a616f3d2-050d-4b29-8498-1e038c460b4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.392801] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1744.392801] env[62820]: value = "task-1696199" [ 1744.392801] env[62820]: _type = "Task" [ 1744.392801] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.406444] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696199, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.599122] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.618645] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.643588] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.643842] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.644064] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.644255] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.644422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.646925] env[62820]: INFO nova.compute.manager [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Terminating instance [ 
1744.659797] env[62820]: DEBUG nova.network.neutron [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updated VIF entry in instance network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1744.659968] env[62820]: DEBUG nova.network.neutron [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap89ab754d-69", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.681791] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0160f436-58c4-4fed-96c2-f97fed219b19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.692499] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7241391-d4d6-4f3b-a9b5-a2ef4ca79ed5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.726358] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dfab9c-90dd-4cd3-9423-9723181cb0b3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.737026] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75507695-acc2-4ffe-8033-72b40a6036a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.749984] env[62820]: DEBUG nova.compute.provider_tree [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1744.820847] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f22203-8e62-4ce0-8135-a9c2f46b931e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.843551] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7b08c4-cdd8-490a-91f7-3b7885d7547b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.865395] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1744.906832] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696199, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.105612] env[62820]: DEBUG nova.network.neutron [-] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.107254] env[62820]: DEBUG nova.network.neutron [-] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.150279] env[62820]: DEBUG nova.compute.manager [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1745.150566] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1745.151624] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfe5976-e29d-4a4e-ae8f-4852548fe0f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.160589] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1745.160855] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d56e1f47-6ec5-476a-9584-67f067338cb7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.162709] env[62820]: DEBUG oslo_concurrency.lockutils [req-97e92d31-8777-4d61-a4ba-8e487c7ad4cf req-82d92fbf-5d91-4da9-a31d-d604a2e3a3a9 service nova] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.168812] env[62820]: DEBUG oslo_vmware.api [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1745.168812] env[62820]: value = "task-1696200" [ 1745.168812] env[62820]: _type = "Task" [ 1745.168812] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.178492] env[62820]: DEBUG oslo_vmware.api [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.274525] env[62820]: ERROR nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [req-21f192c0-c22e-40dd-bfbc-5a420864a942] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21f192c0-c22e-40dd-bfbc-5a420864a942"}]} [ 1745.291947] env[62820]: DEBUG nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1745.311107] env[62820]: DEBUG nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1745.311393] env[62820]: DEBUG nova.compute.provider_tree [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1745.325661] env[62820]: DEBUG nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1745.346407] env[62820]: DEBUG nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1745.404391] env[62820]: DEBUG nova.network.neutron [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Port 66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start 
/opt/stack/nova/nova/network/neutron.py:3228}} [ 1745.409624] env[62820]: DEBUG oslo_vmware.api [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696199, 'name': PowerOnVM_Task, 'duration_secs': 0.763211} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.412525] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1745.412682] env[62820]: INFO nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Took 8.60 seconds to spawn the instance on the hypervisor. [ 1745.412830] env[62820]: DEBUG nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1745.414301] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfda0b8e-8b29-45dd-8823-c1f96b5e3266 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.588453] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59b7288-3ad3-4923-b082-3faa50aff7f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.596882] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e20efa1-50a8-453d-95f7-f2bb0357339e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.626608] env[62820]: INFO nova.compute.manager [-] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Took 1.50 seconds to deallocate network for instance. [ 1745.626919] env[62820]: INFO nova.compute.manager [-] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Took 1.55 seconds to deallocate network for instance. 
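The 409 logged at 1745.274 ("placement.concurrent_update") is Placement's optimistic-concurrency check firing: the inventory PUT carried a stale resource_provider_generation, so the report client re-reads inventories, aggregates and traits (the "Refreshing ..." lines that follow) and retries, eventually landing the update when the generation moves from 138 to 139. Below is a minimal, hedged sketch of that read-modify-write loop against the Placement HTTP API; the endpoint, token and helper name are illustrative, not Nova's actual SchedulerReportClient code.

    # Generation-guarded inventory update against the Placement API.
    # PLACEMENT_URL and the token are placeholders supplied by the caller.
    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # assumption
    HEADERS = {"X-Auth-Token": "<token>",                   # assumption
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid, inventories, max_retries=3):
        """PUT inventories, retrying on a generation conflict (HTTP 409)."""
        url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            # Read the provider's current generation first.
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
                # Another writer bumped the generation; refresh and retry,
                # mirroring the report client's behaviour after the 409 above.
                continue
            resp.raise_for_status()
        raise RuntimeError(f"could not update inventory for {rp_uuid}")

Nova's real client additionally refreshes aggregate and trait associations after the conflict, which is exactly what the subsequent "Refreshing aggregate associations" / "Refreshing trait associations" lines record.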
[ 1745.630174] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6fff7f-61de-4d21-867e-4b4671554452 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.643382] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5ab541-15d1-4ec7-adda-b6a78167900c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.659707] env[62820]: DEBUG nova.compute.provider_tree [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1745.663565] env[62820]: DEBUG nova.compute.manager [req-eb3bcc5b-27cc-474f-b0bb-197675d8346f req-1ba8d3a4-bcc8-4e88-9bd7-b148d87b01e3 service nova] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Received event network-vif-deleted-2e8ab641-c961-452e-a6eb-d760374ac2b2 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1745.663765] env[62820]: DEBUG nova.compute.manager [req-eb3bcc5b-27cc-474f-b0bb-197675d8346f req-1ba8d3a4-bcc8-4e88-9bd7-b148d87b01e3 service nova] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Received event network-vif-deleted-d7eba315-055d-4512-aa37-ec25ee38d9d0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1745.678693] env[62820]: DEBUG oslo_vmware.api [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696200, 'name': PowerOffVM_Task, 'duration_secs': 0.185323} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.678945] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1745.679128] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1745.679426] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a3bd1e3-b218-4836-9707-82d1b6a1c174 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.781248] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1745.781479] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1745.781667] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleting the datastore file [datastore1] e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1745.781943] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0c820ac-e922-4b04-90ae-ec067759a595 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.788517] env[62820]: DEBUG oslo_vmware.api [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for the task: (returnval){ [ 1745.788517] env[62820]: value = "task-1696202" [ 1745.788517] env[62820]: _type = "Task" [ 1745.788517] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.797216] env[62820]: DEBUG oslo_vmware.api [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696202, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.938750] env[62820]: INFO nova.compute.manager [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Took 17.32 seconds to build instance. [ 1746.139225] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.141156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.199614] env[62820]: DEBUG nova.scheduler.client.report [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 138 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1746.199909] env[62820]: DEBUG nova.compute.provider_tree [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 138 to 139 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1746.200112] env[62820]: DEBUG nova.compute.provider_tree [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1746.299797] env[62820]: DEBUG oslo_vmware.api [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Task: {'id': task-1696202, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138461} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.300123] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1746.300317] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1746.300492] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1746.300667] env[62820]: INFO nova.compute.manager [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1746.300969] env[62820]: DEBUG oslo.service.loopingcall [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1746.301197] env[62820]: DEBUG nova.compute.manager [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1746.301322] env[62820]: DEBUG nova.network.neutron [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1746.429639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.429918] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.430132] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.440505] env[62820]: DEBUG oslo_concurrency.lockutils [None req-75eeb60c-721e-4560-a030-acc685935992 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.831s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.546330] env[62820]: DEBUG nova.compute.manager [req-92928474-be2a-4c6b-aff1-9c3e2389cb38 req-7df4b39d-f91c-4d3e-a7fc-9909e364f024 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Received event network-vif-deleted-8c81ac6d-fc1a-4519-81f6-1a3a523acee9 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1746.547020] env[62820]: INFO nova.compute.manager [req-92928474-be2a-4c6b-aff1-9c3e2389cb38 req-7df4b39d-f91c-4d3e-a7fc-9909e364f024 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Neutron deleted interface 8c81ac6d-fc1a-4519-81f6-1a3a523acee9; detaching it from the instance and deleting it from the info cache [ 1746.547020] env[62820]: DEBUG nova.network.neutron [req-92928474-be2a-4c6b-aff1-9c3e2389cb38 req-7df4b39d-f91c-4d3e-a7fc-9909e364f024 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.707024] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.355s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.708528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.954s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.710209] env[62820]: INFO nova.compute.claims [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1747.019183] env[62820]: DEBUG nova.network.neutron [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.050093] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15eae12f-d593-4670-83d0-d25c5639e710 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.061479] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada37a3c-63fb-4841-937f-c99050b4b59a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.098555] env[62820]: DEBUG nova.compute.manager [req-92928474-be2a-4c6b-aff1-9c3e2389cb38 req-7df4b39d-f91c-4d3e-a7fc-9909e364f024 service nova] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Detach interface failed, port_id=8c81ac6d-fc1a-4519-81f6-1a3a523acee9, reason: Instance e420644c-cfcc-4f8c-ae03-c9ebef585690 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1747.219480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56373d81-2bb9-4b8e-b530-8c1eb48a074b tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.940s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.220282] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.603s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.220471] env[62820]: INFO nova.compute.manager [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Unshelving [ 1747.345481] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.345481] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.465812] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.466119] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.466210] env[62820]: DEBUG nova.network.neutron [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1747.521763] env[62820]: INFO nova.compute.manager [-] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Took 1.22 seconds to deallocate network for instance. 
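The "Acquiring lock ... by ...", "acquired ... waited N s" and "released ... held N s" lines in this stretch come from oslo.concurrency's lockutils, which Nova uses to serialize work per instance UUID (the shelve lock on a8486f52-... is held 26.940s before unshelve can take it) and per shared resource ("compute_resources", "refresh_cache-<uuid>"). A minimal sketch of the same pattern follows; the lock names and worker functions are illustrative only.

    # Named-lock pattern behind the lockutils log lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # Serializes claims/usage updates, like ResourceTracker.instance_claim
        # and update_usage contending on "compute_resources" above.
        print(f"claiming resources for {instance_uuid}")

    def refresh_network_cache(instance_uuid):
        # Per-instance lock, analogous to "refresh_cache-<uuid>" in the log.
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            print(f"refreshing network info cache for {instance_uuid}")

    claim_resources("4ea2be66-06b4-4519-82b0-c2b1df329a5a")
    refresh_network_cache("9c0d9676-9db9-4be2-a8e6-84bd816234aa")

The "waited"/"held" timings in the log are emitted by lockutils' internal wrapper around exactly this acquire/release cycle.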
[ 1747.683946] env[62820]: DEBUG nova.compute.manager [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1747.684180] env[62820]: DEBUG nova.compute.manager [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing instance network info cache due to event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1747.684397] env[62820]: DEBUG oslo_concurrency.lockutils [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.684550] env[62820]: DEBUG oslo_concurrency.lockutils [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.684720] env[62820]: DEBUG nova.network.neutron [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1747.847699] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1747.973903] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e59097-5eb3-474a-8759-90f7e4d918c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.982395] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484fe945-5927-432e-bbcd-7da06b25863d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.015363] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bf839c-0354-4e03-a66b-aba074cf6c20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.027521] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea1d094-d41d-4299-9233-d5486046cf3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.032293] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.046749] env[62820]: DEBUG nova.compute.provider_tree [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.230140] env[62820]: DEBUG nova.network.neutron [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.233659] env[62820]: DEBUG nova.compute.utils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1748.367522] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.388701] env[62820]: DEBUG nova.network.neutron [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updated VIF entry in instance network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1748.389090] env[62820]: DEBUG nova.network.neutron [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.550160] env[62820]: DEBUG nova.scheduler.client.report [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} 
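The instance_info_cache entries above are a list of VIF dictionaries: each carries a port id, a MAC address, and a network whose subnets hold ips with any nested floating_ips. A short sketch that walks that structure and prints fixed and floating addresses per port; the shape is taken from the cache entries logged above and the helper itself is purely illustrative.

    # Walk a network_info list shaped like the instance_info_cache entries above.
    def summarize_network_info(network_info):
        for vif in network_info:
            port_id, mac = vif["id"], vif["address"]
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    floats = [f["address"] for f in ip.get("floating_ips", [])]
                    print(f"port {port_id} mac {mac} "
                          f"fixed {ip['address']} floating {floats}")

    # Example using values from the 210277a2-... cache entry above.
    summarize_network_info([{
        "id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1",
        "address": "fa:16:3e:e8:03:6c",
        "network": {"subnets": [{"ips": [{
            "address": "192.168.128.13",
            "floating_ips": [{"address": "10.180.180.206"}]}]}]},
    }])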
[ 1748.736362] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.740473] env[62820]: INFO nova.virt.block_device [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Booting with volume 1302a61c-1765-4676-9304-76b004523986 at /dev/sdb [ 1748.775043] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd8d75d9-8f10-4c8a-a06c-9e956a0c8093 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.786295] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ed2071-dc45-492d-86e6-9bd03f3fc69b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.817969] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b39f7bbe-776a-4a71-82b1-d58ed289ef8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.828245] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed848d0a-32dc-43f1-bfa2-bf702584127d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.861764] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a08c9e-6ae7-4f3d-8087-7486a20fbcc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.868595] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53b0553-055a-4dfe-8f2c-1e1a9dcb7f78 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.882490] env[62820]: DEBUG nova.virt.block_device [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating existing volume attachment record: 944d1f0d-5442-4324-b0f6-368eefbc9508 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1748.891299] env[62820]: DEBUG oslo_concurrency.lockutils [req-15f6218b-658a-4ab9-bbdf-f4e6e6b7a3f2 req-4f2ac385-8820-4c67-a06e-674883ae6efa service nova] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.055349] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.055885] env[62820]: DEBUG nova.compute.manager 
[None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1749.058692] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.460s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.263717] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0051a64e-29f0-4e07-abbc-01db73b1190c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.284285] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa521c89-8d25-404b-8169-b22a5fd6895d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.291587] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1749.562602] env[62820]: DEBUG nova.compute.utils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.564999] env[62820]: DEBUG nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1749.564999] env[62820]: DEBUG nova.network.neutron [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1749.569455] env[62820]: INFO nova.compute.claims [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1749.617144] env[62820]: DEBUG nova.policy [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc838df5682041ed97e19ce34d9f14ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a07ed2a19149b3a58ee43a07e13bba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1749.797843] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d075aa-54c0-490c-b38d-19be7f82e845 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance '210277a2-dd10-4e08-8627-4b025a554410' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1749.913705] env[62820]: DEBUG nova.network.neutron [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Successfully created port: 778fee60-5af2-4328-a536-56882267761d {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1750.074951] env[62820]: DEBUG nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1750.081871] env[62820]: INFO nova.compute.resource_tracker [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating resource usage from migration a4086721-d70e-446a-bef4-66aa59f5e32e [ 1750.404870] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa477a4a-2221-451f-bc66-17464fdc0cb1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.414187] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e0b23b-fea5-4f24-89a8-7b44365cf621 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.446440] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26232b1-33c3-4f46-a4c0-67ba1a64f9d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.455283] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65af3f9c-82e8-4052-b8e7-ba1a5fd29b7e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.471070] env[62820]: DEBUG nova.compute.provider_tree [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.974058] env[62820]: DEBUG nova.scheduler.client.report [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1751.092102] env[62820]: DEBUG nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1751.120328] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1751.120328] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1751.120328] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1751.120670] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1751.120670] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1751.120761] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1751.120988] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1751.121520] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1751.121520] env[62820]: DEBUG 
nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1751.121520] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1751.121686] env[62820]: DEBUG nova.virt.hardware [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1751.122893] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ae246c-178d-45bf-a5c6-309b16790c21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.131674] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb57322-dc70-4a36-9b89-efc3fe3c4b66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.479184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.420s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.479438] env[62820]: INFO nova.compute.manager [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Migrating [ 1751.492758] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.353s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.493063] env[62820]: DEBUG nova.objects.instance [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'resources' on Instance uuid 860637a2-8c59-42af-a9f5-4e80c5466274 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1751.535016] env[62820]: DEBUG nova.compute.manager [req-9f1154a2-e011-437b-8eac-f1f2fc0da20b req-c7b432ff-e660-44e1-8648-811c05487f1c service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Received event network-vif-plugged-778fee60-5af2-4328-a536-56882267761d {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1751.535247] env[62820]: DEBUG oslo_concurrency.lockutils [req-9f1154a2-e011-437b-8eac-f1f2fc0da20b req-c7b432ff-e660-44e1-8648-811c05487f1c service nova] Acquiring lock 
"35b95400-6399-48ae-b7d5-420c33d653dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.535466] env[62820]: DEBUG oslo_concurrency.lockutils [req-9f1154a2-e011-437b-8eac-f1f2fc0da20b req-c7b432ff-e660-44e1-8648-811c05487f1c service nova] Lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.535631] env[62820]: DEBUG oslo_concurrency.lockutils [req-9f1154a2-e011-437b-8eac-f1f2fc0da20b req-c7b432ff-e660-44e1-8648-811c05487f1c service nova] Lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.535797] env[62820]: DEBUG nova.compute.manager [req-9f1154a2-e011-437b-8eac-f1f2fc0da20b req-c7b432ff-e660-44e1-8648-811c05487f1c service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] No waiting events found dispatching network-vif-plugged-778fee60-5af2-4328-a536-56882267761d {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1751.535959] env[62820]: WARNING nova.compute.manager [req-9f1154a2-e011-437b-8eac-f1f2fc0da20b req-c7b432ff-e660-44e1-8648-811c05487f1c service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Received unexpected event network-vif-plugged-778fee60-5af2-4328-a536-56882267761d for instance with vm_state building and task_state spawning. [ 1751.602573] env[62820]: DEBUG nova.network.neutron [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Successfully updated port: 778fee60-5af2-4328-a536-56882267761d {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1751.999210] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.999435] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.999607] env[62820]: DEBUG nova.network.neutron [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.016380] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410" by 
"nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.016644] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.016860] env[62820]: DEBUG nova.compute.manager [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Going to confirm migration 4 {{(pid=62820) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5127}} [ 1752.105503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.105656] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.105803] env[62820]: DEBUG nova.network.neutron [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.230388] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4532c4-1f35-4310-b602-2210a6a7f429 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.239206] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c294f8-7abb-4d54-938d-1eb82cad2fc8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.269413] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650e0b7b-2916-4e29-9fc4-807ce0d2961c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.277954] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92a1400-5bd1-447f-99d7-3ffdbe90acbb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.293027] env[62820]: DEBUG nova.compute.provider_tree [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1752.552668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.552920] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.553035] env[62820]: DEBUG nova.network.neutron [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.553248] env[62820]: DEBUG nova.objects.instance [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'info_cache' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1752.650260] env[62820]: DEBUG nova.network.neutron [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1752.740546] env[62820]: DEBUG nova.network.neutron [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [{"id": "8cea8850-c5a0-4831-99cc-8920c44710b7", "address": "fa:16:3e:a4:96:78", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cea8850-c5", "ovs_interfaceid": "8cea8850-c5a0-4831-99cc-8920c44710b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.779592] env[62820]: DEBUG nova.network.neutron [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance_info_cache with network_info: [{"id": "778fee60-5af2-4328-a536-56882267761d", "address": "fa:16:3e:92:ed:c9", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap778fee60-5a", "ovs_interfaceid": "778fee60-5af2-4328-a536-56882267761d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.812773] env[62820]: ERROR nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [req-7fc98d15-63fe-458c-8903-6fef4db9f7e0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7fc98d15-63fe-458c-8903-6fef4db9f7e0"}]} [ 1752.829121] env[62820]: DEBUG nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1752.842312] env[62820]: DEBUG nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1752.842534] env[62820]: DEBUG nova.compute.provider_tree [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1752.854808] env[62820]: DEBUG nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1752.874493] env[62820]: DEBUG nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1753.103602] env[62820]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f34a671-70c4-42dd-8bb7-c0b50ff375bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.111923] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee77840-2455-4e18-a99f-f7e2498d3cfa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.141512] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293bb6cd-6265-4bb9-8365-3808c7861a3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.149177] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a6bcc0-8a93-4431-a250-494c40436af9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.163794] env[62820]: DEBUG nova.compute.provider_tree [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1753.243514] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.283061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.283368] env[62820]: DEBUG nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Instance network_info: |[{"id": "778fee60-5af2-4328-a536-56882267761d", "address": "fa:16:3e:92:ed:c9", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap778fee60-5a", "ovs_interfaceid": "778fee60-5af2-4328-a536-56882267761d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1753.283781] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:ed:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '778fee60-5af2-4328-a536-56882267761d', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1753.290996] env[62820]: DEBUG oslo.service.loopingcall [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1753.291423] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1753.291693] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9186fb0-6f42-4f20-b10c-5eaa8d65ded4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.311480] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1753.311480] env[62820]: value = "task-1696207" [ 1753.311480] env[62820]: _type = "Task" [ 1753.311480] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.318906] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696207, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.664788] env[62820]: DEBUG nova.compute.manager [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Received event network-changed-778fee60-5af2-4328-a536-56882267761d {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1753.664788] env[62820]: DEBUG nova.compute.manager [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Refreshing instance network info cache due to event network-changed-778fee60-5af2-4328-a536-56882267761d. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1753.664788] env[62820]: DEBUG oslo_concurrency.lockutils [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] Acquiring lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.664969] env[62820]: DEBUG oslo_concurrency.lockutils [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] Acquired lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.665232] env[62820]: DEBUG nova.network.neutron [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Refreshing network info cache for port 778fee60-5af2-4328-a536-56882267761d {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1753.695153] env[62820]: DEBUG nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1753.695428] env[62820]: DEBUG nova.compute.provider_tree [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 140 to 141 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1753.695644] env[62820]: DEBUG nova.compute.provider_tree [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1753.821697] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696207, 'name': CreateVM_Task, 'duration_secs': 0.422105} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.822139] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1753.822855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.823031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.823358] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1753.823606] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80bfbb34-89d1-4e16-b72d-041a5243cad1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.829205] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1753.829205] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529e5844-d8f0-b641-3216-c206b6118415" [ 1753.829205] env[62820]: _type = "Task" [ 1753.829205] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.840203] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529e5844-d8f0-b641-3216-c206b6118415, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.874589] env[62820]: DEBUG nova.network.neutron [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.201964] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.709s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.204012] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.063s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.204248] env[62820]: DEBUG nova.objects.instance [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid 82379c63-8dce-4b61-afb9-9b6a5ff605b5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.226590] env[62820]: INFO nova.scheduler.client.report [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocations for instance 860637a2-8c59-42af-a9f5-4e80c5466274 [ 1754.341549] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529e5844-d8f0-b641-3216-c206b6118415, 'name': SearchDatastore_Task, 
'duration_secs': 0.011557} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.341841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.342099] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1754.342343] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.342492] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.342675] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1754.342931] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-218e52b6-a04d-461e-bee4-92994a4fb0c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.353404] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1754.353591] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1754.356257] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23d9f305-97e7-4fcf-9974-f7b80a3b2189 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.362449] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1754.362449] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e40821-8dff-23a9-7c2e-470aadcd210f" [ 1754.362449] env[62820]: _type = "Task" [ 1754.362449] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.370526] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e40821-8dff-23a9-7c2e-470aadcd210f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.378096] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.378333] env[62820]: DEBUG nova.objects.instance [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'migration_context' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.392232] env[62820]: DEBUG nova.network.neutron [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updated VIF entry in instance network info cache for port 778fee60-5af2-4328-a536-56882267761d. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1754.392569] env[62820]: DEBUG nova.network.neutron [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance_info_cache with network_info: [{"id": "778fee60-5af2-4328-a536-56882267761d", "address": "fa:16:3e:92:ed:c9", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap778fee60-5a", "ovs_interfaceid": "778fee60-5af2-4328-a536-56882267761d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.735038] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6e515c7-21fc-417e-9c0f-f24055061a01 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "860637a2-8c59-42af-a9f5-4e80c5466274" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.516s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.759975] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056cbd41-e673-4bb2-bebe-b7aa688ff01d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.783110] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1754.797526] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "a495b540-806d-4cd8-b340-86fe937867cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.797778] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.875129] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e40821-8dff-23a9-7c2e-470aadcd210f, 'name': SearchDatastore_Task, 'duration_secs': 0.009447} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.879100] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d76f68a9-b5e4-4a1a-8a5a-7fae7cc6c825 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.881079] env[62820]: DEBUG nova.objects.base [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Object Instance<210277a2-dd10-4e08-8627-4b025a554410> lazy-loaded attributes: info_cache,migration_context {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1754.881832] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58b33a1-93a8-46c3-b814-1aac8157492c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.904857] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1754.904857] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523cf5d9-d945-10f5-dd4e-c68f7080424b" [ 1754.904857] env[62820]: _type = "Task" [ 1754.904857] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.905374] env[62820]: DEBUG oslo_concurrency.lockutils [req-d2dafefc-a4e9-406b-80b8-29adc3d58536 req-07845f48-9b4b-4571-a1dc-6e64dd0324fc service nova] Releasing lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.908771] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7741d1cd-daa5-4c73-b6da-67fa431d844b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.919177] env[62820]: DEBUG oslo_vmware.api [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1754.919177] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523ca3d4-2466-bef9-78d7-a230ce21cc07" [ 1754.919177] env[62820]: _type = "Task" [ 1754.919177] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.922922] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523cf5d9-d945-10f5-dd4e-c68f7080424b, 'name': SearchDatastore_Task, 'duration_secs': 0.010673} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.928245] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.928545] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd/35b95400-6399-48ae-b7d5-420c33d653dd.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1754.929331] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c44a6af-6155-4e85-a9c1-aef9240fc29e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.937888] env[62820]: DEBUG oslo_vmware.api [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523ca3d4-2466-bef9-78d7-a230ce21cc07, 'name': SearchDatastore_Task, 'duration_secs': 0.007447} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.939385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.939781] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1754.939781] env[62820]: value = "task-1696208" [ 1754.939781] env[62820]: _type = "Task" [ 1754.939781] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.952013] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.981333] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.992852] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351d8546-70e4-412b-bec6-f0f8bb7d28a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.001415] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37258f7b-aa3f-4142-a4ce-7a719ff17134 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.032048] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db3db3c-749d-461b-9358-fb0fc6e549c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.039866] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c8eaa5-c4b8-4183-b8e2-65920d0a11fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.053523] env[62820]: DEBUG nova.compute.provider_tree [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.291236] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1755.291656] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b47b2089-bee7-4f07-8e69-22fa594aad8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.302329] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1755.305650] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1755.305650] env[62820]: value = "task-1696209" [ 1755.305650] env[62820]: _type = "Task" [ 1755.305650] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.318652] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696209, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.452912] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474532} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.453239] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd/35b95400-6399-48ae-b7d5-420c33d653dd.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1755.453434] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1755.453689] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfc1d0e8-95e3-4b0f-8e52-d6b7edeabe92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.463248] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1755.463248] env[62820]: value = "task-1696210" [ 1755.463248] env[62820]: _type = "Task" [ 1755.463248] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.472522] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696210, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.557060] env[62820]: DEBUG nova.scheduler.client.report [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1755.821010] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696209, 'name': PowerOffVM_Task, 'duration_secs': 0.281462} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.821390] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1755.821652] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1755.826760] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.973889] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067769} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.974163] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1755.974919] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867bb8d7-92f6-4e0e-b4ba-9e8a372248b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.997618] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd/35b95400-6399-48ae-b7d5-420c33d653dd.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1755.997880] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b65bb2b8-67a3-49ba-8282-6018d6650d1b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.016971] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1756.016971] env[62820]: value = "task-1696211" [ 1756.016971] env[62820]: _type = "Task" [ 1756.016971] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.025193] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696211, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.062311] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.065318] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.032s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.065504] env[62820]: DEBUG nova.objects.instance [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lazy-loading 'resources' on Instance uuid e420644c-cfcc-4f8c-ae03-c9ebef585690 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.084455] env[62820]: INFO nova.scheduler.client.report [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance 82379c63-8dce-4b61-afb9-9b6a5ff605b5 [ 1756.328219] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1756.328552] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1756.328716] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1756.328904] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1756.329123] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1756.329315] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1756.329606] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1756.329817] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1756.330038] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1756.330251] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1756.330469] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1756.335448] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e55106ad-15ed-47ad-b65e-99698dbb29f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.355711] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1756.355711] env[62820]: value = "task-1696212" [ 1756.355711] env[62820]: _type = "Task" [ 1756.355711] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.365645] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696212, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.527423] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696211, 'name': ReconfigVM_Task, 'duration_secs': 0.363858} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.527712] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd/35b95400-6399-48ae-b7d5-420c33d653dd.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1756.528357] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5aecbb1-5338-4653-84f3-835c38713a85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.535917] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1756.535917] env[62820]: value = "task-1696213" [ 1756.535917] env[62820]: _type = "Task" [ 1756.535917] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.545051] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696213, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.593221] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c45f0721-663d-4337-8861-ca0d987d3022 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "82379c63-8dce-4b61-afb9-9b6a5ff605b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.660s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.811704] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc13b98-2ea1-45e1-8f4f-78cdee33bccb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.820044] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072561c0-713b-49fb-95db-eba62c9c8877 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.855061] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9178cb1d-949c-40e4-bd9d-f7745f016537 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.872040] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10a418b-0ee3-490b-a9e3-6b61d4460db5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.876878] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696212, 'name': ReconfigVM_Task, 'duration_secs': 0.397799} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.877470] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1756.890829] env[62820]: DEBUG nova.compute.provider_tree [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.046540] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696213, 'name': Rename_Task, 'duration_secs': 0.229389} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.046836] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.047127] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-669ef6fb-1701-4dea-800d-fa997c23ee22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.056690] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1757.056690] env[62820]: value = "task-1696214" [ 1757.056690] env[62820]: _type = "Task" [ 1757.056690] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.065043] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.383958] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1757.384236] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1757.384400] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1757.384553] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1757.384700] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 
tempest-DeleteServersTestJSON-728742032-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1757.384845] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1757.385062] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1757.385227] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1757.385392] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1757.385565] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1757.385761] env[62820]: DEBUG nova.virt.hardware [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1757.391342] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1757.391661] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdd0ce2e-4eae-4916-b34c-73bbdaab1afc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.406063] env[62820]: DEBUG nova.scheduler.client.report [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1757.416997] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1757.416997] env[62820]: value = "task-1696215" [ 1757.416997] env[62820]: _type = "Task" [ 1757.416997] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.426996] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696215, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.568639] env[62820]: DEBUG oslo_vmware.api [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696214, 'name': PowerOnVM_Task, 'duration_secs': 0.457034} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.568876] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1757.569099] env[62820]: INFO nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Took 6.48 seconds to spawn the instance on the hypervisor. 
[ 1757.569283] env[62820]: DEBUG nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1757.570097] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155e08b5-e3e8-436a-8dab-77676e320235 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.818512] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.818763] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.911781] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.847s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.914372] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.547s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.915927] env[62820]: INFO nova.compute.claims [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1757.929677] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696215, 'name': ReconfigVM_Task, 'duration_secs': 0.401109} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.930180] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1757.930896] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c778ec0-7a3e-4d88-b730-4c94d22f3e6d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.934305] env[62820]: INFO nova.scheduler.client.report [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Deleted allocations for instance e420644c-cfcc-4f8c-ae03-c9ebef585690 [ 1757.958438] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e/4323e7df-136f-4bbe-8160-fd7b2579727e.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1757.960027] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f04b708-f697-44bc-baa0-87aa86eb6e92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.983740] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1757.983740] env[62820]: value = "task-1696216" [ 1757.983740] env[62820]: _type = "Task" [ 1757.983740] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.993107] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696216, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.086250] env[62820]: INFO nova.compute.manager [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Took 14.35 seconds to build instance. [ 1758.321754] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1758.448470] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.448808] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.462921] env[62820]: DEBUG oslo_concurrency.lockutils [None req-86f6f04b-463d-469f-90a2-5e052ea36a03 tempest-ServersNegativeTestJSON-723863828 tempest-ServersNegativeTestJSON-723863828-project-member] Lock "e420644c-cfcc-4f8c-ae03-c9ebef585690" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.819s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.494846] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696216, 'name': ReconfigVM_Task, 'duration_secs': 0.272474} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.495176] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e/4323e7df-136f-4bbe-8160-fd7b2579727e.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1758.495435] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1758.589214] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f04c316a-566a-440e-ba69-b210caf9f114 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.880s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.848517] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.952982] env[62820]: DEBUG nova.compute.utils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1759.004777] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3c048f-37d2-4d5a-b484-b9eaaad3258f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.027662] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e86dd8-65af-46c0-bcfc-e35fe643e1cf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.048358] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.194587] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b63af2-c6d2-457f-a521-57a6a385797c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.203465] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0478e7a8-0b77-478a-aee3-7423539d44ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.242152] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b51276-8efa-42ef-9bb5-1ef566108914 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.249423] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bc5726-ebf2-486f-bd4f-a8817bc81374 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.266933] env[62820]: DEBUG nova.compute.provider_tree [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.342173] env[62820]: DEBUG nova.compute.manager [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1759.454211] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.597353] env[62820]: DEBUG nova.network.neutron [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Port 8cea8850-c5a0-4831-99cc-8920c44710b7 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1759.774695] env[62820]: DEBUG nova.scheduler.client.report [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1759.865262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.281906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.282957] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1760.285952] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.347s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.538877] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.539179] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.539396] env[62820]: INFO nova.compute.manager [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Attaching volume 16c6f1fc-6f67-4e08-aab1-a64450c4d085 to /dev/sdb [ 1760.569910] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea955e4-e68f-4e1c-a6b2-07407165b70b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.577242] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9571c214-0db0-4368-ac4a-62a5a1d17f3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.591297] env[62820]: DEBUG nova.virt.block_device [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updating existing volume attachment record: 5622884f-310f-4737-9edc-fe8d77528c7e {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1760.620050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.620287] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.620458] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.791575] env[62820]: DEBUG nova.compute.utils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1760.791575] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1760.791575] env[62820]: DEBUG nova.network.neutron [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1760.871159] env[62820]: DEBUG nova.policy [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1761.070678] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273919df-888a-4074-be58-408711593eed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.079181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67af5809-3160-45ac-ab00-f6b99070c92a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.113940] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd25e20-2595-4e7f-8249-e3c99dc5d8ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.118978] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b778567-02d9-4fa7-83e8-825d0bf8e423 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.136848] env[62820]: DEBUG nova.compute.provider_tree [None 
req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1761.294376] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1761.304933] env[62820]: DEBUG nova.network.neutron [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Successfully created port: ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1761.640397] env[62820]: DEBUG nova.scheduler.client.report [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1761.671258] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.671451] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.671626] env[62820]: DEBUG nova.network.neutron [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1762.308858] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1762.336683] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1762.336952] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1762.337120] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1762.337304] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1762.337447] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1762.337591] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1762.337796] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1762.337951] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1762.338126] 
env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1762.338288] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1762.338455] env[62820]: DEBUG nova.virt.hardware [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1762.339398] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d42ddc-60eb-4396-9b0c-4eb5a02941ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.350112] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6892ed6-dcc2-4ce5-a95c-a59a55a340b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.373929] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.375011] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.377053] env[62820]: DEBUG nova.network.neutron [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [{"id": "8cea8850-c5a0-4831-99cc-8920c44710b7", "address": "fa:16:3e:a4:96:78", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cea8850-c5", "ovs_interfaceid": "8cea8850-c5a0-4831-99cc-8920c44710b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.651565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.365s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.651892] env[62820]: DEBUG nova.compute.manager [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=62820) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5253}} [ 1762.654461] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.673s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.654682] env[62820]: DEBUG nova.objects.instance [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'pci_requests' on Instance uuid a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1762.815164] env[62820]: DEBUG nova.compute.manager [req-fca81978-2341-44eb-997a-bbe3f0f51d30 req-c38f1872-d5df-4806-80f3-64a145c140ba service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-vif-plugged-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1762.815396] env[62820]: DEBUG oslo_concurrency.lockutils [req-fca81978-2341-44eb-997a-bbe3f0f51d30 req-c38f1872-d5df-4806-80f3-64a145c140ba service nova] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.815610] env[62820]: DEBUG oslo_concurrency.lockutils [req-fca81978-2341-44eb-997a-bbe3f0f51d30 req-c38f1872-d5df-4806-80f3-64a145c140ba service nova] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.815782] env[62820]: DEBUG oslo_concurrency.lockutils [req-fca81978-2341-44eb-997a-bbe3f0f51d30 req-c38f1872-d5df-4806-80f3-64a145c140ba service nova] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.815952] env[62820]: DEBUG nova.compute.manager [req-fca81978-2341-44eb-997a-bbe3f0f51d30 req-c38f1872-d5df-4806-80f3-64a145c140ba service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] No waiting events found dispatching network-vif-plugged-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1762.816217] env[62820]: WARNING nova.compute.manager [req-fca81978-2341-44eb-997a-bbe3f0f51d30 req-c38f1872-d5df-4806-80f3-64a145c140ba service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received unexpected event network-vif-plugged-ced8fea2-d4eb-4f3b-b2be-7974608dd130 for instance with vm_state building and task_state spawning. [ 1762.875978] env[62820]: DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1762.879184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.947891] env[62820]: DEBUG nova.network.neutron [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Successfully updated port: ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.644956] env[62820]: DEBUG nova.objects.instance [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'numa_topology' on Instance uuid a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.650650] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.650844] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.650926] env[62820]: DEBUG nova.network.neutron [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1763.655575] env[62820]: INFO nova.compute.claims [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1763.678180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.686418] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef7568e-9f60-461e-a367-e23137e78f66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.707127] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd857d28-df9e-40dc-a8f2-2bd9c9a688a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.710743] env[62820]: INFO nova.scheduler.client.report [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted allocation for migration cf85995d-0f02-4aa1-b5bb-a279754babe4 [ 1763.716765] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.160702] env[62820]: DEBUG nova.objects.instance [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'flavor' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1764.184039] env[62820]: DEBUG nova.network.neutron [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1764.221786] env[62820]: DEBUG oslo_concurrency.lockutils [None req-40e7d4e8-d891-4ae6-939e-9cdf768bbae4 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.205s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.225156] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1764.225156] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06cb17b2-9a27-462f-8878-f393ce2a6bcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.233467] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1764.233467] env[62820]: value = "task-1696221" [ 1764.233467] env[62820]: _type = "Task" [ 1764.233467] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.245032] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696221, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.341224] env[62820]: DEBUG nova.network.neutron [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.669940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.670273] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.670497] env[62820]: DEBUG nova.network.neutron [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1764.670579] env[62820]: DEBUG nova.objects.instance [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'info_cache' on Instance uuid 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1764.745027] env[62820]: DEBUG oslo_vmware.api [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696221, 'name': PowerOnVM_Task, 'duration_secs': 0.386023} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.747494] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1764.747682] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1077a1f5-62a8-400b-82ba-f20a2a45ccb0 tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance '4323e7df-136f-4bbe-8160-fd7b2579727e' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.843559] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.843899] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Instance network_info: |[{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1764.845106] env[62820]: DEBUG nova.compute.manager [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1764.845309] env[62820]: DEBUG nova.compute.manager [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing instance network info cache due to event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1764.845526] env[62820]: DEBUG oslo_concurrency.lockutils [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.845678] env[62820]: DEBUG oslo_concurrency.lockutils [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.846186] env[62820]: DEBUG nova.network.neutron [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1764.847388] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:da:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ced8fea2-d4eb-4f3b-b2be-7974608dd130', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1764.855701] env[62820]: DEBUG oslo.service.loopingcall [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1764.860589] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1764.861387] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fca173b-269d-49fe-882d-e728e017b4f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.885235] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1764.885235] env[62820]: value = "task-1696222" [ 1764.885235] env[62820]: _type = "Task" [ 1764.885235] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.894883] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696222, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.922161] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45a2e97-00f3-423c-858e-e538695df650 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.930140] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632efc91-ea3a-4aa7-9511-b708f207207a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.965903] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222029ec-b7c1-4931-996d-fff9bdc28baa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.974952] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735add2b-ec2a-412d-b0ff-4e14bc8f5267 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.990707] env[62820]: DEBUG nova.compute.provider_tree [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.139908] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1765.140175] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353651', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'name': 'volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '03b0abc8-dd32-4cf9-8750-d64b8a66695e', 'attached_at': '', 'detached_at': '', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'serial': '16c6f1fc-6f67-4e08-aab1-a64450c4d085'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1765.141105] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b4537a-93eb-484b-9e75-7ee184f59ffc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.158801] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec4e28f-d188-439b-a3e4-cdff2c9d29bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.162257] env[62820]: DEBUG nova.network.neutron [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updated VIF entry in instance network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1765.162667] env[62820]: DEBUG nova.network.neutron [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.182022] env[62820]: DEBUG nova.objects.base [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Object Instance<210277a2-dd10-4e08-8627-4b025a554410> lazy-loaded attributes: flavor,info_cache {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1765.190312] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085/volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1765.193596] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db213fd1-bfcd-46c8-a230-ac396ed2d1f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.213775] env[62820]: DEBUG oslo_vmware.api [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1765.213775] env[62820]: value = "task-1696223" [ 1765.213775] env[62820]: _type = "Task" [ 1765.213775] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.223107] env[62820]: DEBUG oslo_vmware.api [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696223, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.395890] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696222, 'name': CreateVM_Task, 'duration_secs': 0.33233} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.395890] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1765.396588] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.396749] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.397071] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1765.397324] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e50dd2a1-7c4d-46f9-b5ee-fbfec38dc12e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.402470] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1765.402470] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52abf839-28b0-cf5e-d776-1bc52784176f" [ 1765.402470] env[62820]: _type = "Task" [ 1765.402470] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.414692] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52abf839-28b0-cf5e-d776-1bc52784176f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.442749] env[62820]: DEBUG nova.network.neutron [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [{"id": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "address": "fa:16:3e:e8:03:6c", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ea13ce-9e", "ovs_interfaceid": "66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.499099] env[62820]: DEBUG nova.scheduler.client.report [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1765.665804] env[62820]: DEBUG oslo_concurrency.lockutils [req-4e403d76-2136-460e-a875-a48c360330b4 req-7527a771-41d9-4f20-940f-3434de368d42 service nova] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.723484] env[62820]: DEBUG oslo_vmware.api [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696223, 'name': ReconfigVM_Task, 'duration_secs': 0.357054} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.723781] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085/volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1765.728362] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe348d9c-a41a-4025-8861-f5dd05425a70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.744213] env[62820]: DEBUG oslo_vmware.api [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1765.744213] env[62820]: value = "task-1696224" [ 1765.744213] env[62820]: _type = "Task" [ 1765.744213] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.753793] env[62820]: DEBUG oslo_vmware.api [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696224, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.916136] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52abf839-28b0-cf5e-d776-1bc52784176f, 'name': SearchDatastore_Task, 'duration_secs': 0.013563} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.916411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.916647] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1765.916889] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.917050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.917235] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1765.917503] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f5e07ba-7710-4b0d-a965-5a56f1c036be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.928059] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1765.928059] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1765.928793] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-071a0c3a-ac39-425d-9b8b-e85c3d987c5b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.934875] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1765.934875] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52036b63-17f7-5ecd-e157-5df642812281" [ 1765.934875] env[62820]: _type = "Task" [ 1765.934875] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.943931] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52036b63-17f7-5ecd-e157-5df642812281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.945482] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-210277a2-dd10-4e08-8627-4b025a554410" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.005489] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.351s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.007762] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.181s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.009267] env[62820]: INFO nova.compute.claims [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1766.046592] env[62820]: INFO nova.network.neutron [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating port 89ab754d-6988-4b28-882b-5f352eda86ec with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1766.256517] env[62820]: DEBUG oslo_vmware.api [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696224, 'name': 
ReconfigVM_Task, 'duration_secs': 0.19592} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.256517] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353651', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'name': 'volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '03b0abc8-dd32-4cf9-8750-d64b8a66695e', 'attached_at': '', 'detached_at': '', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'serial': '16c6f1fc-6f67-4e08-aab1-a64450c4d085'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1766.446828] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52036b63-17f7-5ecd-e157-5df642812281, 'name': SearchDatastore_Task, 'duration_secs': 0.01091} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.446828] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d71881d-947d-4311-abc1-3f5dfc1d1551 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.455496] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1766.455496] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c5f931-1bcb-ead1-ee43-0a6ebfe8b51e" [ 1766.455496] env[62820]: _type = "Task" [ 1766.455496] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.464045] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c5f931-1bcb-ead1-ee43-0a6ebfe8b51e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.620603] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4323e7df-136f-4bbe-8160-fd7b2579727e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.620727] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.620909] env[62820]: DEBUG nova.compute.manager [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Going to confirm migration 5 {{(pid=62820) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5127}} [ 1766.953908] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1766.954359] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a212e9d9-a070-41ea-b974-161ffbfaf88b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.967849] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c5f931-1bcb-ead1-ee43-0a6ebfe8b51e, 'name': SearchDatastore_Task, 'duration_secs': 0.032622} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.969197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.969466] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ea2be66-06b4-4519-82b0-c2b1df329a5a/4ea2be66-06b4-4519-82b0-c2b1df329a5a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1766.969784] env[62820]: DEBUG oslo_vmware.api [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1766.969784] env[62820]: value = "task-1696225" [ 1766.969784] env[62820]: _type = "Task" [ 1766.969784] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.969966] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac2c2a2e-26f2-498b-8ee7-4559e730bc10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.980042] env[62820]: DEBUG oslo_vmware.api [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.981277] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1766.981277] env[62820]: value = "task-1696226" [ 1766.981277] env[62820]: _type = "Task" [ 1766.981277] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.989434] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696226, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.191040] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.191288] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquired lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.191477] env[62820]: DEBUG nova.network.neutron [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1767.191663] env[62820]: DEBUG nova.objects.instance [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'info_cache' on Instance uuid 4323e7df-136f-4bbe-8160-fd7b2579727e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1767.287234] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34b1ba3-0cd2-4360-b44a-1c7c235b2081 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.292720] env[62820]: DEBUG nova.objects.instance [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'flavor' on Instance uuid 03b0abc8-dd32-4cf9-8750-d64b8a66695e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1767.297942] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5022e877-8a20-4791-96d3-2b916e3486e3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.336207] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4002321c-f6a7-4b33-9ff9-01cbe800b2ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.348027] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0169da1-5d2f-4844-bb8d-b82710dd0912 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.365065] env[62820]: DEBUG nova.compute.provider_tree [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.483696] env[62820]: DEBUG oslo_vmware.api [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 
tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696225, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.493182] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696226, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471793} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.493480] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4ea2be66-06b4-4519-82b0-c2b1df329a5a/4ea2be66-06b4-4519-82b0-c2b1df329a5a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1767.493765] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1767.493955] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26d1b6f3-8a93-4530-a8b4-645cfaef5519 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.502230] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1767.502230] env[62820]: value = "task-1696227" [ 1767.502230] env[62820]: _type = "Task" [ 1767.502230] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.511407] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696227, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.615332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.615533] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.615776] env[62820]: DEBUG nova.network.neutron [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1767.666157] env[62820]: DEBUG nova.compute.manager [req-346938f9-7ed5-4499-889f-e276c307809d req-9de6bebc-e283-46e9-b34b-0d7e82bfc063 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-vif-plugged-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1767.666390] env[62820]: DEBUG oslo_concurrency.lockutils [req-346938f9-7ed5-4499-889f-e276c307809d req-9de6bebc-e283-46e9-b34b-0d7e82bfc063 service nova] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.666622] env[62820]: DEBUG oslo_concurrency.lockutils [req-346938f9-7ed5-4499-889f-e276c307809d req-9de6bebc-e283-46e9-b34b-0d7e82bfc063 service nova] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.666793] env[62820]: DEBUG oslo_concurrency.lockutils [req-346938f9-7ed5-4499-889f-e276c307809d req-9de6bebc-e283-46e9-b34b-0d7e82bfc063 service nova] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.666963] env[62820]: DEBUG nova.compute.manager [req-346938f9-7ed5-4499-889f-e276c307809d req-9de6bebc-e283-46e9-b34b-0d7e82bfc063 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] No waiting events found dispatching network-vif-plugged-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1767.667253] env[62820]: WARNING nova.compute.manager [req-346938f9-7ed5-4499-889f-e276c307809d req-9de6bebc-e283-46e9-b34b-0d7e82bfc063 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received unexpected event network-vif-plugged-89ab754d-6988-4b28-882b-5f352eda86ec for instance with vm_state shelved_offloaded and task_state 
spawning. [ 1767.797880] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d4635d77-7be9-4b59-9566-09f3dfcf926c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.259s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.868761] env[62820]: DEBUG nova.scheduler.client.report [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1767.983427] env[62820]: DEBUG oslo_vmware.api [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696225, 'name': PowerOnVM_Task, 'duration_secs': 0.59831} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.983427] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1767.983427] env[62820]: DEBUG nova.compute.manager [None req-68d3a137-e6f2-4857-be94-994c00aa1eeb tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1767.986104] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92508e64-3905-4c44-ac83-226ab16fc5a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.012051] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082763} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.012336] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1768.013157] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1aab2a2-6e0a-4246-b15a-304c4dca137e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.036367] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 4ea2be66-06b4-4519-82b0-c2b1df329a5a/4ea2be66-06b4-4519-82b0-c2b1df329a5a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1768.036983] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d10ba9d-915d-47f2-a6c5-ff460fd2a8d3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.058365] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1768.058365] env[62820]: value = "task-1696228" [ 1768.058365] env[62820]: _type = "Task" [ 1768.058365] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.066561] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696228, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.373942] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.374504] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1768.377108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.529s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.380675] env[62820]: INFO nova.compute.claims [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1768.386772] env[62820]: DEBUG nova.network.neutron [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.450929] env[62820]: DEBUG nova.network.neutron [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [{"id": "8cea8850-c5a0-4831-99cc-8920c44710b7", "address": "fa:16:3e:a4:96:78", "network": {"id": "06e3d02b-ea9b-4708-bdcc-4dcd5be5be4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-683230935-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65abf73e789b48d3ba24e2660d7c0341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cea8850-c5", "ovs_interfaceid": "8cea8850-c5a0-4831-99cc-8920c44710b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.569281] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696228, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.886168] env[62820]: DEBUG nova.compute.utils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1768.889435] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1768.889621] env[62820]: DEBUG nova.network.neutron [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1768.891970] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.917895] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='aa8a7faa65893b314d34b4c9e445d1c1',container_format='bare',created_at=2024-12-10T16:54:56Z,direct_url=,disk_format='vmdk',id=9a38379f-96c5-43f3-94c6-97b0f5ad58c1,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-844770894-shelved',owner='bfe9869537de4334a0c8ce91fd062659',properties=ImageMetaProps,protected=,size=31672320,status='active',tags=,updated_at=2024-12-10T16:55:14Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1768.918157] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 
tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1768.918314] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1768.918499] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1768.918714] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1768.918874] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1768.919119] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1768.919291] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1768.919460] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1768.919649] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1768.919837] env[62820]: DEBUG nova.virt.hardware [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1768.920708] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2619ad9d-9a95-409c-967f-551f28b56094 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1768.929119] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da42ff2-20a1-4d27-abc7-916fa4df1557 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.943759] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:b6:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89ab754d-6988-4b28-882b-5f352eda86ec', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1768.951109] env[62820]: DEBUG oslo.service.loopingcall [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.951367] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1768.951584] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1288986-e217-4d71-9d97-68b843cca569 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.968923] env[62820]: DEBUG nova.policy [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815f8967d40e4943a66da6866de8b018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14768f5b38ea4f6abf5583ce5e4409f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1768.970543] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Releasing lock "refresh_cache-4323e7df-136f-4bbe-8160-fd7b2579727e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.970776] env[62820]: DEBUG nova.objects.instance [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lazy-loading 'migration_context' on Instance uuid 4323e7df-136f-4bbe-8160-fd7b2579727e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1768.979644] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1768.979644] env[62820]: value = "task-1696229" [ 1768.979644] env[62820]: _type = "Task" [ 1768.979644] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.987662] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696229, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.071061] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696228, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.311104] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "392d8bca-2d8d-42c3-ba14-fc1387c75405" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.311339] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.356818] env[62820]: DEBUG nova.network.neutron [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Successfully created port: 18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1769.390685] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1769.474374] env[62820]: DEBUG nova.objects.base [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Object Instance<4323e7df-136f-4bbe-8160-fd7b2579727e> lazy-loaded attributes: info_cache,migration_context {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1769.477543] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c0e0a2-e3b9-48bf-9c6f-8372b92b636a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.507318] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc1d66c9-9a60-4b0e-86d6-c3f4edc62d42 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.509582] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696229, 'name': CreateVM_Task, 'duration_secs': 0.344895} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.510572] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1769.511574] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.511757] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.512144] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1769.512391] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55c50715-ef93-45bb-9ab3-282dd0aa3c84 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.514896] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1769.514896] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5269aa6c-5fbe-8a69-48ef-d9375cccef8a" [ 1769.514896] env[62820]: _type = "Task" [ 1769.514896] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.521298] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1769.521298] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f778c9-9e35-c76e-80df-36690224ccfe" [ 1769.521298] env[62820]: _type = "Task" [ 1769.521298] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.527946] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5269aa6c-5fbe-8a69-48ef-d9375cccef8a, 'name': SearchDatastore_Task, 'duration_secs': 0.007405} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.528713] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.533906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.534207] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Processing image 9a38379f-96c5-43f3-94c6-97b0f5ad58c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1769.534419] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.534570] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.534775] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1769.535051] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-019ffc39-31f7-4283-a9c2-8d64d9f9ba3d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.544073] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1769.544289] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1769.545011] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b595520-9042-4d1f-bd89-5af2e4033f6c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.550916] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1769.550916] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525d619b-1b93-57a4-af95-1c58472dcfb9" [ 1769.550916] env[62820]: _type = "Task" [ 1769.550916] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.562213] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525d619b-1b93-57a4-af95-1c58472dcfb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.573386] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696228, 'name': ReconfigVM_Task, 'duration_secs': 1.169504} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.573680] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 4ea2be66-06b4-4519-82b0-c2b1df329a5a/4ea2be66-06b4-4519-82b0-c2b1df329a5a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1769.574366] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b41fff6-f413-4cdb-915c-92840877d5ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.581432] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1769.581432] env[62820]: value = "task-1696230" [ 1769.581432] env[62820]: _type = "Task" [ 1769.581432] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.589979] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696230, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.697768] env[62820]: DEBUG nova.compute.manager [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1769.698019] env[62820]: DEBUG nova.compute.manager [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing instance network info cache due to event network-changed-89ab754d-6988-4b28-882b-5f352eda86ec. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1769.698246] env[62820]: DEBUG oslo_concurrency.lockutils [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] Acquiring lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.698410] env[62820]: DEBUG oslo_concurrency.lockutils [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] Acquired lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.698605] env[62820]: DEBUG nova.network.neutron [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Refreshing network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1769.707710] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4950b2-04b3-455c-895f-0795cec0c164 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.717230] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84db889-1a0b-4369-8947-ea9d4db18fb8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.750793] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2ec111-3b2a-42df-bc5b-65c09adbb4ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.758965] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276e900c-89fa-4e34-93e0-12e95ee664c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.773018] env[62820]: DEBUG nova.compute.provider_tree [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1769.813648] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 
392d8bca-2d8d-42c3-ba14-fc1387c75405] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1769.964633] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.964907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.965132] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "210277a2-dd10-4e08-8627-4b025a554410-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.965732] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.965907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.969410] env[62820]: INFO nova.compute.manager [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Terminating instance [ 1770.066339] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Preparing fetch location {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1770.066714] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Fetch image to [datastore1] OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971/OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971.vmdk {{(pid=62820) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1770.066997] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Downloading stream optimized image 9a38379f-96c5-43f3-94c6-97b0f5ad58c1 to [datastore1] OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971/OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971.vmdk on the data store datastore1 as vApp {{(pid=62820) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1770.067259] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Downloading image file data 9a38379f-96c5-43f3-94c6-97b0f5ad58c1 to the ESX as VM named 'OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971' {{(pid=62820) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1770.094879] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696230, 'name': Rename_Task, 'duration_secs': 0.174835} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.111040] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1770.111831] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56006e99-32db-492f-bb56-44e2044e7ea7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.118980] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1770.118980] env[62820]: value = "task-1696231" [ 1770.118980] env[62820]: _type = "Task" [ 1770.118980] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.128203] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696231, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.146888] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1770.146888] env[62820]: value = "resgroup-9" [ 1770.146888] env[62820]: _type = "ResourcePool" [ 1770.146888] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1770.147203] env[62820]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-93ecbeb0-ae50-47eb-9b1b-df2922eedfde {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.168825] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease: (returnval){ [ 1770.168825] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1770.168825] env[62820]: _type = "HttpNfcLease" [ 1770.168825] env[62820]: } obtained for vApp import into resource pool (val){ [ 1770.168825] env[62820]: value = "resgroup-9" [ 1770.168825] env[62820]: _type = "ResourcePool" [ 1770.168825] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1770.169166] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the lease: (returnval){ [ 1770.169166] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1770.169166] env[62820]: _type = "HttpNfcLease" [ 1770.169166] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1770.176340] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1770.176340] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1770.176340] env[62820]: _type = "HttpNfcLease" [ 1770.176340] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1770.277379] env[62820]: DEBUG nova.scheduler.client.report [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1770.336959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.403225] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1770.417182] env[62820]: DEBUG nova.network.neutron [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updated VIF entry in instance network info cache for port 89ab754d-6988-4b28-882b-5f352eda86ec. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1770.417554] env[62820]: DEBUG nova.network.neutron [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [{"id": "89ab754d-6988-4b28-882b-5f352eda86ec", "address": "fa:16:3e:6f:b6:6d", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ab754d-69", "ovs_interfaceid": "89ab754d-6988-4b28-882b-5f352eda86ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.425680] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1770.425956] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1770.426147] env[62820]: DEBUG nova.virt.hardware [None 
req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.426338] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1770.426485] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.426630] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1770.426832] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1770.426989] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1770.427167] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1770.427328] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1770.427497] env[62820]: DEBUG nova.virt.hardware [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1770.429100] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070fe5d1-ebc5-41ea-aecb-042dd1987075 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.437784] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bf5a11-9308-4106-a3c3-20b193577d4c {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.473628] env[62820]: DEBUG nova.compute.manager [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1770.473834] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1770.474605] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239426a4-df2a-4fc0-852b-10ee70db7d86 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.482145] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1770.482382] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-088d8ec0-e1cb-4416-83b7-b358d368ee1f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.488854] env[62820]: DEBUG oslo_vmware.api [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1770.488854] env[62820]: value = "task-1696233" [ 1770.488854] env[62820]: _type = "Task" [ 1770.488854] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.496245] env[62820]: DEBUG oslo_vmware.api [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.630789] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696231, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.677695] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1770.677695] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1770.677695] env[62820]: _type = "HttpNfcLease" [ 1770.677695] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1770.782765] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.783367] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1770.786237] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.921s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.884351] env[62820]: DEBUG nova.compute.manager [req-eea679b6-3eb7-4b9e-8ba7-068ec6bdeba9 req-052c52d8-57c4-4ab7-9eb4-8b8b4f50e72f service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Received event network-vif-plugged-18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1770.884586] env[62820]: DEBUG oslo_concurrency.lockutils [req-eea679b6-3eb7-4b9e-8ba7-068ec6bdeba9 req-052c52d8-57c4-4ab7-9eb4-8b8b4f50e72f service nova] Acquiring lock "a495b540-806d-4cd8-b340-86fe937867cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.884825] env[62820]: DEBUG oslo_concurrency.lockutils [req-eea679b6-3eb7-4b9e-8ba7-068ec6bdeba9 req-052c52d8-57c4-4ab7-9eb4-8b8b4f50e72f service nova] Lock "a495b540-806d-4cd8-b340-86fe937867cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.885024] env[62820]: DEBUG oslo_concurrency.lockutils [req-eea679b6-3eb7-4b9e-8ba7-068ec6bdeba9 req-052c52d8-57c4-4ab7-9eb4-8b8b4f50e72f service nova] Lock "a495b540-806d-4cd8-b340-86fe937867cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.885217] env[62820]: DEBUG nova.compute.manager [req-eea679b6-3eb7-4b9e-8ba7-068ec6bdeba9 req-052c52d8-57c4-4ab7-9eb4-8b8b4f50e72f service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] No waiting events found dispatching network-vif-plugged-18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1770.885392] env[62820]: WARNING nova.compute.manager [req-eea679b6-3eb7-4b9e-8ba7-068ec6bdeba9 req-052c52d8-57c4-4ab7-9eb4-8b8b4f50e72f service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Received unexpected event network-vif-plugged-18cc900d-6813-4f95-b166-a6b1a486f112 for 
instance with vm_state building and task_state spawning. [ 1770.921658] env[62820]: DEBUG oslo_concurrency.lockutils [req-0c223ca9-7d57-4ea4-852c-5446cff0efc6 req-7ec4ebc7-c801-4a8c-a86c-88d06386e54a service nova] Releasing lock "refresh_cache-a8486f52-998d-4308-813a-9c651e2eb093" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.999579] env[62820]: DEBUG oslo_vmware.api [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696233, 'name': PowerOffVM_Task, 'duration_secs': 0.432768} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.999885] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1771.000081] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1771.000337] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8df9812-edce-4e13-963a-eed895de29b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.049381] env[62820]: DEBUG nova.network.neutron [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Successfully updated port: 18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1771.132916] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696231, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.134308] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1771.134511] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1771.134691] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleting the datastore file [datastore1] 210277a2-dd10-4e08-8627-4b025a554410 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1771.135025] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2b74bb4-1edb-4eca-b8d6-9aad0e667a07 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.143774] env[62820]: DEBUG oslo_vmware.api [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1771.143774] env[62820]: value = "task-1696235" [ 1771.143774] env[62820]: _type = "Task" [ 1771.143774] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.153848] env[62820]: DEBUG oslo_vmware.api [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.178745] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1771.178745] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1771.178745] env[62820]: _type = "HttpNfcLease" [ 1771.178745] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1771.290141] env[62820]: DEBUG nova.compute.utils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1771.294334] env[62820]: INFO nova.compute.claims [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1771.298539] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1771.298872] env[62820]: DEBUG nova.network.neutron [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1771.349389] env[62820]: DEBUG nova.policy [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1771.502911] env[62820]: DEBUG oslo_concurrency.lockutils [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.503167] env[62820]: DEBUG oslo_concurrency.lockutils [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.554739] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-a495b540-806d-4cd8-b340-86fe937867cd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.554830] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-a495b540-806d-4cd8-b340-86fe937867cd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.555015] env[62820]: DEBUG nova.network.neutron [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1771.629014] env[62820]: DEBUG nova.network.neutron [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Successfully created port: 2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1771.634596] env[62820]: DEBUG oslo_vmware.api [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696231, 'name': PowerOnVM_Task, 'duration_secs': 1.083181} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.634835] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1771.635049] env[62820]: INFO nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1771.635237] env[62820]: DEBUG nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1771.636436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c109097e-5940-4c72-b03a-e917b378bf02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.654439] env[62820]: DEBUG oslo_vmware.api [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164641} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.654724] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1771.654910] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1771.655163] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1771.655433] env[62820]: INFO nova.compute.manager [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1771.655823] env[62820]: DEBUG oslo.service.loopingcall [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1771.655974] env[62820]: DEBUG nova.compute.manager [-] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1771.656099] env[62820]: DEBUG nova.network.neutron [-] [instance: 210277a2-dd10-4e08-8627-4b025a554410] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1771.679840] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1771.679840] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1771.679840] env[62820]: _type = "HttpNfcLease" [ 1771.679840] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1771.680220] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1771.680220] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d10124-b01a-1458-5738-d8009e240863" [ 1771.680220] env[62820]: _type = "HttpNfcLease" [ 1771.680220] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1771.680977] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6029100-1f67-4625-9c13-1773fd3cad8a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.693018] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529539dc-6d22-6502-282b-d7ce2d36bfa9/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1771.693018] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating HTTP connection to write to file with size = 31672320 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529539dc-6d22-6502-282b-d7ce2d36bfa9/disk-0.vmdk. {{(pid=62820) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1771.756618] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6590a42c-f3dc-4431-bca3-02ffad707893 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.799804] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1771.803864] env[62820]: INFO nova.compute.resource_tracker [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating resource usage from migration b8b6ec5c-db87-41a5-a6af-b30a71a3bb9f [ 1772.006537] env[62820]: DEBUG nova.compute.utils [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1772.096715] env[62820]: DEBUG nova.network.neutron [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1772.118690] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b1904d-0d06-41b7-8341-2af06e451be9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.132283] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4106670b-e5c5-455b-ba65-6eb9d8da8f03 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.186017] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b067d9-d852-4670-94d6-cbd5e5127445 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.192937] env[62820]: INFO nova.compute.manager [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Took 23.84 seconds to build instance. [ 1772.201713] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90e1d55-bf0a-49d4-b0fa-75f83fe9d01e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.222590] env[62820]: DEBUG nova.compute.provider_tree [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.303017] env[62820]: DEBUG nova.network.neutron [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Updating instance_info_cache with network_info: [{"id": "18cc900d-6813-4f95-b166-a6b1a486f112", "address": "fa:16:3e:33:1a:24", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18cc900d-68", "ovs_interfaceid": "18cc900d-6813-4f95-b166-a6b1a486f112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.511197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-31114632-3c94-4956-beee-5cedd55f2be5 
tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.695302] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5f6f1dda-f0d2-4d1d-bb30-94ae1fda724d tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.350s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.710278] env[62820]: DEBUG nova.network.neutron [-] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.727484] env[62820]: DEBUG nova.scheduler.client.report [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1772.805573] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-a495b540-806d-4cd8-b340-86fe937867cd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.805920] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance network_info: |[{"id": "18cc900d-6813-4f95-b166-a6b1a486f112", "address": "fa:16:3e:33:1a:24", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18cc900d-68", "ovs_interfaceid": "18cc900d-6813-4f95-b166-a6b1a486f112", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1772.806867] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:1a:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18cc900d-6813-4f95-b166-a6b1a486f112', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1772.814779] env[62820]: DEBUG oslo.service.loopingcall [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.816869] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1772.819015] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1772.819471] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1657315c-55fe-450b-8198-474245692302 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.843030] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1772.843030] env[62820]: value = "task-1696236" [ 1772.843030] env[62820]: _type = "Task" [ 1772.843030] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.846384] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1772.846647] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1772.846805] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1772.847031] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1772.847258] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1772.847457] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1772.847830] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1772.848482] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1772.848482] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1772.848718] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1772.849023] env[62820]: DEBUG nova.virt.hardware [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1772.850061] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b12a109-e645-4578-93da-7ffa65b31809 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.866489] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696236, 'name': CreateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.868641] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a610fe-f31d-4b20-98c9-ce7548d52417 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.106824] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Completed reading data from the image iterator. {{(pid=62820) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1773.107127] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529539dc-6d22-6502-282b-d7ce2d36bfa9/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1773.108784] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b0bad3-9c39-49dd-b4ec-a43299de9e7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.118737] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529539dc-6d22-6502-282b-d7ce2d36bfa9/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1773.118995] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529539dc-6d22-6502-282b-d7ce2d36bfa9/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1773.119235] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-898db31d-0dc8-455e-9bf3-92a96251df9a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.212900] env[62820]: INFO nova.compute.manager [-] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Took 1.56 seconds to deallocate network for instance. [ 1773.229076] env[62820]: DEBUG nova.network.neutron [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Successfully updated port: 2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1773.240773] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.454s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.241111] env[62820]: INFO nova.compute.manager [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Migrating [ 1773.248366] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.570s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.249905] env[62820]: INFO nova.compute.claims [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1773.359668] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696236, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.461849] env[62820]: DEBUG oslo_vmware.rw_handles [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529539dc-6d22-6502-282b-d7ce2d36bfa9/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1773.462158] env[62820]: INFO nova.virt.vmwareapi.images [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Downloaded image file data 9a38379f-96c5-43f3-94c6-97b0f5ad58c1 [ 1773.463164] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0839ebb-a91a-405c-a771-0e7bdcfb9f59 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.481168] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03593dfa-9f85-41c1-a2af-e4c2dc3cd956 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.576181] env[62820]: DEBUG oslo_concurrency.lockutils [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.576512] env[62820]: DEBUG oslo_concurrency.lockutils [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.576779] env[62820]: INFO nova.compute.manager [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Attaching volume a9165bc5-6390-450a-8758-45dec9de7f6a to /dev/sdb [ 1773.608311] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314199be-3ac5-48e6-8f66-88b186f2a55e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.616600] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7294e599-dc12-4ca7-8453-c47202153847 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.631928] env[62820]: DEBUG nova.virt.block_device [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating existing volume attachment record: 46679175-04b6-4549-8815-71d0644dcb17 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1773.644791] env[62820]: INFO nova.virt.vmwareapi.images [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] The imported VM was unregistered [ 1773.647155] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Caching image {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1773.647405] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1773.647691] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10eeb784-06c3-4a96-bc11-e5a72a6991ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.661776] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created directory with path [datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1773.661983] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971/OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971.vmdk to [datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk. {{(pid=62820) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1773.662266] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b45a3308-fdf2-4880-8e18-72b3d1c12a30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.670666] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1773.670666] env[62820]: value = "task-1696238" [ 1773.670666] env[62820]: _type = "Task" [ 1773.670666] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.679569] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.720978] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.732896] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.733044] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.733178] env[62820]: DEBUG nova.network.neutron [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1773.761140] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.761534] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.761607] env[62820]: DEBUG nova.network.neutron [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1773.861648] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696236, 'name': CreateVM_Task, 'duration_secs': 0.598304} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.861832] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1773.862560] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.863126] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.863126] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1773.863327] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35311fb5-6024-428d-9263-f64549e7222d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.868379] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1773.868379] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cff7ea-f371-f3f6-be7e-b79bc12c4a19" [ 1773.868379] env[62820]: _type = "Task" [ 1773.868379] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.876999] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cff7ea-f371-f3f6-be7e-b79bc12c4a19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.897645] env[62820]: DEBUG nova.compute.manager [req-18aa4085-6f2f-43b4-befe-66be64156f43 req-d6040261-547c-40ac-b723-d104fd496d33 service nova] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Received event network-vif-deleted-66ea13ce-9ec1-417e-9ce1-a7427e2fc5a1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1774.183196] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.244374] env[62820]: DEBUG nova.compute.manager [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Received event network-changed-18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1774.244740] env[62820]: DEBUG nova.compute.manager [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Refreshing instance network info cache due to event network-changed-18cc900d-6813-4f95-b166-a6b1a486f112. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1774.244740] env[62820]: DEBUG oslo_concurrency.lockutils [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] Acquiring lock "refresh_cache-a495b540-806d-4cd8-b340-86fe937867cd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.244891] env[62820]: DEBUG oslo_concurrency.lockutils [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] Acquired lock "refresh_cache-a495b540-806d-4cd8-b340-86fe937867cd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.245098] env[62820]: DEBUG nova.network.neutron [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Refreshing network info cache for port 18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1774.269996] env[62820]: DEBUG nova.network.neutron [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1774.389483] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cff7ea-f371-f3f6-be7e-b79bc12c4a19, 'name': SearchDatastore_Task, 'duration_secs': 0.096526} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.389869] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.390127] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1774.390404] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.390558] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.390742] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1774.391440] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eebf0966-980e-4231-96f7-5aecfa720981 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.421481] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1774.421930] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1774.423018] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87e90f63-3d56-41c5-a423-886332ba6997 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.437644] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1774.437644] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526c9cc1-06d9-8718-7079-6a8dc5ec8d7a" [ 1774.437644] env[62820]: _type = "Task" [ 1774.437644] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.457715] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526c9cc1-06d9-8718-7079-6a8dc5ec8d7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.590279] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5805bbd2-5369-44e2-b18b-f587e7a6359a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.601633] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9069a99c-fdc7-4fc3-afcc-973c30c2cbeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.641460] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a76a06-4f09-4c8a-a2a8-ca27b1fb7fa8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.647721] env[62820]: DEBUG nova.network.neutron [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Updating instance_info_cache with network_info: [{"id": "2d669a0f-9bd6-4e38-8692-b076be881645", "address": "fa:16:3e:23:9f:59", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d669a0f-9b", "ovs_interfaceid": "2d669a0f-9bd6-4e38-8692-b076be881645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.652656] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768e30c4-f655-4439-a53c-95a093a64956 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.670969] env[62820]: DEBUG nova.compute.provider_tree [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.695414] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.958252] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526c9cc1-06d9-8718-7079-6a8dc5ec8d7a, 'name': SearchDatastore_Task, 'duration_secs': 0.095499} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.959096] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f32bef1-354f-4a76-bc7f-ded4530bd63b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.969284] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1774.969284] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c2aa13-d4f7-da86-bbd7-08706a060171" [ 1774.969284] env[62820]: _type = "Task" [ 1774.969284] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.982959] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c2aa13-d4f7-da86-bbd7-08706a060171, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.031465] env[62820]: DEBUG nova.network.neutron [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance_info_cache with network_info: [{"id": "778fee60-5af2-4328-a536-56882267761d", "address": "fa:16:3e:92:ed:c9", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap778fee60-5a", "ovs_interfaceid": "778fee60-5af2-4328-a536-56882267761d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.155194] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.155194] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Instance network_info: |[{"id": "2d669a0f-9bd6-4e38-8692-b076be881645", "address": "fa:16:3e:23:9f:59", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d669a0f-9b", "ovs_interfaceid": "2d669a0f-9bd6-4e38-8692-b076be881645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1775.155194] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:9f:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d669a0f-9bd6-4e38-8692-b076be881645', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1775.166448] env[62820]: DEBUG oslo.service.loopingcall [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.166964] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1775.167325] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1692e257-122d-49ad-b61d-1ffd9116283c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.188570] env[62820]: DEBUG nova.scheduler.client.report [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1775.210029] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.210029] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1775.210029] env[62820]: value = "task-1696242" [ 1775.210029] env[62820]: _type = "Task" [ 1775.210029] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.224140] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696242, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.351895] env[62820]: DEBUG nova.network.neutron [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Updated VIF entry in instance network info cache for port 18cc900d-6813-4f95-b166-a6b1a486f112. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1775.352312] env[62820]: DEBUG nova.network.neutron [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Updating instance_info_cache with network_info: [{"id": "18cc900d-6813-4f95-b166-a6b1a486f112", "address": "fa:16:3e:33:1a:24", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18cc900d-68", "ovs_interfaceid": "18cc900d-6813-4f95-b166-a6b1a486f112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.484102] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c2aa13-d4f7-da86-bbd7-08706a060171, 'name': SearchDatastore_Task, 'duration_secs': 0.099121} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.484447] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.484772] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1775.485310] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3612ef7c-b6b7-4b30-b7bc-bee00b5b270f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.497587] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1775.497587] env[62820]: value = "task-1696243" [ 1775.497587] env[62820]: _type = "Task" [ 1775.497587] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.512574] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.533460] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.698818] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.699402] env[62820]: DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1775.706102] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.177s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.707492] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.724038] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696242, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.856118] env[62820]: DEBUG oslo_concurrency.lockutils [req-9caea00b-37b5-400c-981e-909ddaf8b114 req-d9ee62a2-2929-4ce7-b2e0-e99ba500443c service nova] Releasing lock "refresh_cache-a495b540-806d-4cd8-b340-86fe937867cd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.948865] env[62820]: DEBUG nova.compute.manager [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1775.949018] env[62820]: DEBUG nova.compute.manager [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing instance network info cache due to event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1775.949241] env[62820]: DEBUG oslo_concurrency.lockutils [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.949493] env[62820]: DEBUG oslo_concurrency.lockutils [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.949542] env[62820]: DEBUG nova.network.neutron [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1776.009959] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696243, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.198711] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.210686] env[62820]: DEBUG nova.compute.utils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1776.216941] env[62820]: DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1776.216941] env[62820]: DEBUG nova.network.neutron [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1776.230594] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696242, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.357514] env[62820]: DEBUG nova.policy [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18246bae0222415c96ec5b252cf5bd6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57b0c64a8704e7aaeba4011866c7a24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1776.381898] env[62820]: DEBUG nova.compute.manager [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Received event network-vif-plugged-2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1776.382922] env[62820]: DEBUG oslo_concurrency.lockutils [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] Acquiring lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.383538] env[62820]: DEBUG oslo_concurrency.lockutils [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.383653] env[62820]: DEBUG oslo_concurrency.lockutils [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.384306] env[62820]: DEBUG nova.compute.manager [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] No waiting events found dispatching network-vif-plugged-2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1776.384425] env[62820]: WARNING nova.compute.manager [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Received unexpected event network-vif-plugged-2d669a0f-9bd6-4e38-8692-b076be881645 for instance with vm_state building and task_state spawning. [ 1776.384597] env[62820]: DEBUG nova.compute.manager [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Received event network-changed-2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1776.385394] env[62820]: DEBUG nova.compute.manager [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Refreshing instance network info cache due to event network-changed-2d669a0f-9bd6-4e38-8692-b076be881645. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1776.385394] env[62820]: DEBUG oslo_concurrency.lockutils [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] Acquiring lock "refresh_cache-efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.385394] env[62820]: DEBUG oslo_concurrency.lockutils [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] Acquired lock "refresh_cache-efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.385394] env[62820]: DEBUG nova.network.neutron [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Refreshing network info cache for port 2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1776.514170] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696243, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.581067] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ab6399-740b-44f9-aa9c-d240b4557a8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.589733] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afeaaa7-2d40-496e-bf29-3e0b6566e955 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.629089] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc9185e-9556-412e-a6e1-21005de5ee50 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.638110] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70293a6d-7b5d-4de1-94a3-ae27edfbee76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.659027] env[62820]: DEBUG nova.compute.provider_tree [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.699839] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696238, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.581681} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.700300] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971/OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971.vmdk to [datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk. 
[ 1776.700633] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Cleaning up location [datastore1] OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1776.700944] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_3b48033d-b300-49ac-b46f-18a9f3514971 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1776.701453] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3a2d2bd-27f3-42af-9ff7-9e5ba9464432 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.710650] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1776.710650] env[62820]: value = "task-1696244" [ 1776.710650] env[62820]: _type = "Task" [ 1776.710650] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.716725] env[62820]: DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1776.723675] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.733880] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696242, 'name': CreateVM_Task, 'duration_secs': 1.300406} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.734259] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1776.735090] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.735410] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.735869] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1776.737657] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47c94bb8-973d-481d-aaa5-19c347ea76b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.744896] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1776.744896] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d59fea-2f96-47a8-773a-74c23f5dd5bf" [ 1776.744896] env[62820]: _type = "Task" [ 1776.744896] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.758184] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d59fea-2f96-47a8-773a-74c23f5dd5bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.011666] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696243, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.151831} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.012106] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1777.012479] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1777.012830] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba1ce066-8283-43eb-9ca1-5127edcdf4fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.022986] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1777.022986] env[62820]: value = "task-1696245" [ 1777.022986] env[62820]: _type = "Task" [ 1777.022986] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.036713] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696245, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.051826] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfab30d2-9adc-4bed-bb30-cc314a178bba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.070885] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1777.117106] env[62820]: DEBUG nova.network.neutron [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Updated VIF entry in instance network info cache for port 2d669a0f-9bd6-4e38-8692-b076be881645. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1777.117106] env[62820]: DEBUG nova.network.neutron [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Updating instance_info_cache with network_info: [{"id": "2d669a0f-9bd6-4e38-8692-b076be881645", "address": "fa:16:3e:23:9f:59", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d669a0f-9b", "ovs_interfaceid": "2d669a0f-9bd6-4e38-8692-b076be881645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.161336] env[62820]: DEBUG nova.network.neutron [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updated VIF entry in instance network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1777.161336] env[62820]: DEBUG nova.network.neutron [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.164515] env[62820]: DEBUG nova.scheduler.client.report [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1777.222668] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061064} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.222926] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1777.223210] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.223499] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk to [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1777.223771] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c88f1f46-9e6c-41dc-9c39-01fb8825c38c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.236440] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1777.236440] env[62820]: value = "task-1696246" [ 1777.236440] env[62820]: _type = "Task" [ 1777.236440] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.247630] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696246, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.248664] env[62820]: DEBUG nova.network.neutron [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Successfully created port: 94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1777.262420] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d59fea-2f96-47a8-773a-74c23f5dd5bf, 'name': SearchDatastore_Task, 'duration_secs': 0.026389} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.262746] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.263176] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1777.263275] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.263427] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.263621] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1777.267079] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43f38e7b-971a-4d67-aecb-857397a767d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.277207] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1777.277405] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. 
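The Acquiring/Acquired/Releasing pairs above use a lock named after the cached image path, so concurrent spawns of the same image serialize on the fetch/copy step while spawns of different images proceed in parallel. A simplified stand-in for that pattern built on plain threading primitives; the real locking lives in oslo_concurrency.lockutils, and the registry, function name, and prints here are illustrative only.

# Illustrative stand-in for the per-name lock pattern in the log above.
# Nova uses oslo_concurrency.lockutils; this only shows the shape.
import threading
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}"')
    with lock:
        print(f'Acquired lock "{name}"')
        yield
    print(f'Released lock "{name}"')

with named_lock("[datastore1] devstack-image-cache_base/"
                "b17619ac-779a-4463-ab94-4bb0b9ba63c1/"
                "b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk"):
    pass  # fetch or copy the cached image while holding the lock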
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1777.278141] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d77861d-e318-4830-b068-473c865f9715 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.284479] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1777.284479] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52eb6e87-8091-663a-3439-5cff7f29826b" [ 1777.284479] env[62820]: _type = "Task" [ 1777.284479] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.292682] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52eb6e87-8091-663a-3439-5cff7f29826b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.534795] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696245, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072175} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.535523] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1777.536412] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dd3c24-53c3-4ecb-b420-2ec99ea32a6d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.562787] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1777.563142] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bdbc766-5070-4d21-b795-f9cc75b38857 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.578930] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1777.579279] env[62820]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d22ffe5-f4dd-4128-932d-365a3ce42723 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.591067] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1777.591067] env[62820]: value = "task-1696248" [ 1777.591067] env[62820]: _type = "Task" [ 1777.591067] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.591366] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1777.591366] env[62820]: value = "task-1696247" [ 1777.591366] env[62820]: _type = "Task" [ 1777.591366] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.607573] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.612613] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696248, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.619543] env[62820]: DEBUG oslo_concurrency.lockutils [req-94b44aa3-198a-4d1d-9e28-f75e3d2edb4c req-ce8bcafb-aaca-4226-b665-effa5b726ecc service nova] Releasing lock "refresh_cache-efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.663071] env[62820]: DEBUG oslo_concurrency.lockutils [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.663380] env[62820]: DEBUG nova.compute.manager [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1777.663634] env[62820]: DEBUG nova.compute.manager [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing instance network info cache due to event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1777.663875] env[62820]: DEBUG oslo_concurrency.lockutils [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.664042] env[62820]: DEBUG oslo_concurrency.lockutils [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.664221] env[62820]: DEBUG nova.network.neutron [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1777.732381] env[62820]: DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1777.757543] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696246, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.771904] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1777.771904] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1777.771904] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1777.771904] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1777.772258] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1777.772258] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1777.773551] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1777.773551] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1777.773551] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1777.773551] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1777.773551] env[62820]: DEBUG nova.virt.hardware [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1777.774403] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb75fef-a107-442e-b93c-62b5fd46c4b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.786116] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1861c237-3514-4c2d-9db4-b87ea5a65ea3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.803106] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52eb6e87-8091-663a-3439-5cff7f29826b, 'name': 
SearchDatastore_Task, 'duration_secs': 0.009751} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.812532] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f5fc566-3371-4da8-be0d-9040c1516198 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.821383] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1777.821383] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526923af-9e5b-6780-d1bc-3bde56724df5" [ 1777.821383] env[62820]: _type = "Task" [ 1777.821383] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.832039] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526923af-9e5b-6780-d1bc-3bde56724df5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.107275] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696248, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.110508] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696247, 'name': PowerOffVM_Task, 'duration_secs': 0.450369} completed successfully. 
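Each "Waiting for the task" entry followed by repeated "Task: ... progress is N%" lines and a final "completed successfully" is one poll loop against a vCenter task handle. A minimal sketch of that loop under stated assumptions: the poll() callable, its state strings, and the interval are made up for illustration and are not the oslo.vmware API.

import time

# Minimal sketch of the poll loop behind the "progress is N%" /
# "completed successfully" entries. poll(), the state names, and the
# interval are assumptions, not the oslo.vmware wait_for_task API.
def wait_for_task(poll, interval=0.5):
    while True:
        state, progress = poll()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {progress}%")
        time.sleep(interval)

# Fake task that reaches success after a few polls.
_steps = iter([("running", 0), ("running", 43), ("running", 85), ("success", 100)])
wait_for_task(lambda: next(_steps), interval=0)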
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.110987] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1778.110987] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1778.173974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.468s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.176943] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.840s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.179212] env[62820]: INFO nova.compute.claims [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1778.203913] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "76bd4a09-300d-460e-8442-21b4f6567698" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.204060] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "76bd4a09-300d-460e-8442-21b4f6567698" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.204245] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "76bd4a09-300d-460e-8442-21b4f6567698-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.204499] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "76bd4a09-300d-460e-8442-21b4f6567698-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.204754] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "76bd4a09-300d-460e-8442-21b4f6567698-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.208495] env[62820]: INFO nova.compute.manager [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Terminating instance [ 1778.249648] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696246, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.335265] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526923af-9e5b-6780-d1bc-3bde56724df5, 'name': SearchDatastore_Task, 'duration_secs': 0.092271} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.338314] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.338775] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] efe5ffe8-cd29-467d-85ad-d9e7d4eb9203/efe5ffe8-cd29-467d-85ad-d9e7d4eb9203.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1778.339104] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67245ec1-5b9f-4cd0-bb7f-f38e2f2e39c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.349582] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1778.349582] env[62820]: value = "task-1696249" [ 1778.349582] env[62820]: _type = "Task" [ 1778.349582] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.364405] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.376203] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-9c0d9676-9db9-4be2-a8e6-84bd816234aa-5120b8e3-6688-4386-9c99-ee01add07316" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.376485] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-9c0d9676-9db9-4be2-a8e6-84bd816234aa-5120b8e3-6688-4386-9c99-ee01add07316" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.376960] env[62820]: DEBUG nova.objects.instance [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'flavor' on Instance uuid 9c0d9676-9db9-4be2-a8e6-84bd816234aa {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1778.427737] env[62820]: DEBUG nova.network.neutron [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updated VIF entry in instance network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1778.428169] env[62820]: DEBUG nova.network.neutron [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.605124] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696248, 'name': ReconfigVM_Task, 'duration_secs': 1.004184} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.605480] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Reconfigured VM instance instance-00000062 to attach disk [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1778.606364] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52a8a8d8-47f9-4508-ad85-bce6f3b69c4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.615167] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1778.615167] env[62820]: value = "task-1696251" [ 1778.615167] env[62820]: _type = "Task" [ 1778.615167] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.617771] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1778.618093] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1778.618313] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1778.618519] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1778.618740] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1778.618913] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1778.619146] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1778.619313] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1778.619480] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1778.619673] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1778.619878] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1778.628344] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e22bfec0-a25a-47e7-8a65-58a476177173 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.647943] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696251, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.649680] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1778.649680] env[62820]: value = "task-1696252" [ 1778.649680] env[62820]: _type = "Task" [ 1778.649680] env[62820]: } to complete. 
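The topology walk repeated above (Flavor/Image limits and prefs, then "Got 1 possible topologies" and "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerates every sockets x cores x threads factorization of the vCPU count that fits under the per-dimension limits; with a single vCPU the only candidate is 1:1:1. A small sketch of that enumeration; the function and its default limits are assumptions for illustration, not nova.virt.hardware.

# Sketch: enumerate (sockets, cores, threads) factorizations of the
# vCPU count under per-dimension maxima. Defaults mirror the 65536
# limits seen in the log; the function itself is illustrative.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    out = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                out.append((sockets, cores, threads))
    return out

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the log entries above
print(possible_topologies(4))  # includes (1, 1, 4), (1, 2, 2), (2, 2, 1), ...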
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.661425] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696252, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.714093] env[62820]: DEBUG nova.compute.manager [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1778.714384] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1778.715672] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761db2fe-7f70-4ddb-8bfc-803daf5c87d1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.725973] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1778.726244] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a6a8e58-ad4c-4974-a90d-027685b1fd46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.736545] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1778.736545] env[62820]: value = "task-1696253" [ 1778.736545] env[62820]: _type = "Task" [ 1778.736545] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.747372] env[62820]: INFO nova.scheduler.client.report [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocation for migration a4086721-d70e-446a-bef4-66aa59f5e32e [ 1778.754695] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696246, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.755082] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696253, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.864888] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.931433] env[62820]: DEBUG oslo_concurrency.lockutils [req-55669c3b-b8f3-45f2-b0ef-9994dd4cf9bb req-e5237f5b-0505-469d-8603-7821d3c4feb9 service nova] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.058226] env[62820]: DEBUG nova.objects.instance [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'pci_requests' on Instance uuid 9c0d9676-9db9-4be2-a8e6-84bd816234aa {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1779.074326] env[62820]: DEBUG nova.network.neutron [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Successfully updated port: 94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1779.129585] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696251, 'name': Rename_Task, 'duration_secs': 0.46605} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.129990] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.130182] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76d874f8-8909-42a3-ba6f-2e9e8440af69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.140392] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1779.140392] env[62820]: value = "task-1696254" [ 1779.140392] env[62820]: _type = "Task" [ 1779.140392] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.161466] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696254, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.170758] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696252, 'name': ReconfigVM_Task, 'duration_secs': 0.43755} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.171107] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1779.257962] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696253, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.258326] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696246, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.260888] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.640s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.365398] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.465885] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837b97ae-2f89-4e1c-8df9-9008df92490a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.476544] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08114b67-c95c-42a8-b41f-548511fa03c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.511546] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3192c24a-6c38-4bf9-84d5-6a5bc2159c20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.522059] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7097e3-5df6-4e2c-a667-6b2c49f9c9d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.539474] env[62820]: DEBUG nova.compute.provider_tree [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.561926] env[62820]: DEBUG nova.objects.base [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Object Instance<9c0d9676-9db9-4be2-a8e6-84bd816234aa> lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1779.562167] env[62820]: DEBUG nova.network.neutron [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1779.578228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-6768101f-8d1d-46be-b0b9-2fdf6cba08da" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.578228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-6768101f-8d1d-46be-b0b9-2fdf6cba08da" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.578228] env[62820]: DEBUG nova.network.neutron [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1779.633286] env[62820]: DEBUG nova.policy [None req-604f1398-be8f-4552-b325-eea5a14efa2c 
tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1779.656525] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696254, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.679486] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1779.679964] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1779.680316] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1779.680664] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1779.680956] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1779.681273] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1779.681645] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1779.681969] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1779.682334] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1779.682677] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1779.683119] env[62820]: DEBUG nova.virt.hardware [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1779.693051] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1779.693575] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49af4346-c7ef-4203-aeaf-d7230d6f1398 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.725201] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1779.725201] env[62820]: value = "task-1696255" [ 1779.725201] env[62820]: _type = "Task" [ 1779.725201] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.738867] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696255, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.759531] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696253, 'name': PowerOffVM_Task, 'duration_secs': 0.529368} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.765392] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1779.765795] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1779.768330] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696246, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.356871} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.768330] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abcf877e-b2ee-4756-9c43-728e75b53b17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.770815] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9a38379f-96c5-43f3-94c6-97b0f5ad58c1/9a38379f-96c5-43f3-94c6-97b0f5ad58c1.vmdk to [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1779.772275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ab9db2-403f-49bb-a390-e07c451725f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.810818] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1779.811406] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f68936fb-50a2-4269-a2b7-e91442d8fb09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.849815] 
env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1779.849815] env[62820]: value = "task-1696257" [ 1779.849815] env[62820]: _type = "Task" [ 1779.849815] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.861065] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696257, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.870047] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696249, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.887078] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1779.887078] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1779.887078] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Deleting the datastore file [datastore1] 76bd4a09-300d-460e-8442-21b4f6567698 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1779.887395] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfa847e4-37b7-4876-ab3f-eaa9c967d22a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.897743] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for the task: (returnval){ [ 1779.897743] env[62820]: value = "task-1696258" [ 1779.897743] env[62820]: _type = "Task" [ 1779.897743] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.910211] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696258, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.043121] env[62820]: DEBUG nova.scheduler.client.report [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1780.109864] env[62820]: DEBUG nova.network.neutron [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1780.152669] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696254, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.240282] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696255, 'name': ReconfigVM_Task, 'duration_secs': 0.381997} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.240282] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1780.241128] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360c317f-912e-4e89-bd79-ef6d33c18cc0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.266697] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd/35b95400-6399-48ae-b7d5-420c33d653dd.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1780.268029] env[62820]: DEBUG nova.network.neutron [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Updating instance_info_cache with network_info: [{"id": "94134340-ccc9-4f22-af2f-2b68424f6ec3", "address": "fa:16:3e:ef:9c:5b", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94134340-cc", "ovs_interfaceid": "94134340-ccc9-4f22-af2f-2b68424f6ec3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.269250] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfdc56be-6b83-4f22-839f-79054e147569 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.283694] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-6768101f-8d1d-46be-b0b9-2fdf6cba08da" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.283945] env[62820]: 
DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance network_info: |[{"id": "94134340-ccc9-4f22-af2f-2b68424f6ec3", "address": "fa:16:3e:ef:9c:5b", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94134340-cc", "ovs_interfaceid": "94134340-ccc9-4f22-af2f-2b68424f6ec3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1780.284674] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:9c:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94134340-ccc9-4f22-af2f-2b68424f6ec3', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1780.292056] env[62820]: DEBUG oslo.service.loopingcall [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.294980] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1780.295293] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d51393c-ef5e-4bcb-825b-d3726e9ff229 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.311273] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1780.311273] env[62820]: value = "task-1696259" [ 1780.311273] env[62820]: _type = "Task" [ 1780.311273] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.316895] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1780.316895] env[62820]: value = "task-1696260" [ 1780.316895] env[62820]: _type = "Task" [ 1780.316895] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.320595] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696259, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.328273] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696260, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.358336] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696257, 'name': ReconfigVM_Task, 'duration_secs': 0.394062} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.359074] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfigured VM instance instance-0000003b to attach disk [datastore1] a8486f52-998d-4308-813a-9c651e2eb093/a8486f52-998d-4308-813a-9c651e2eb093.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1780.360646] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'device_type': 'disk', 'guest_format': None, 'disk_bus': None, 'encrypted': False, 'size': 0, 'device_name': '/dev/sda', 'encryption_options': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'image_id': 'b17619ac-779a-4463-ab94-4bb0b9ba63c1'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353648', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'name': 'volume-1302a61c-1765-4676-9304-76b004523986', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8486f52-998d-4308-813a-9c651e2eb093', 'attached_at': '', 'detached_at': '', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'serial': '1302a61c-1765-4676-9304-76b004523986'}, 'attachment_id': '944d1f0d-5442-4324-b0f6-368eefbc9508', 'delete_on_termination': False, 'boot_index': None, 'device_type': None, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=62820) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1780.360857] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c 
tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Volume attach. Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1780.361149] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353648', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'name': 'volume-1302a61c-1765-4676-9304-76b004523986', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8486f52-998d-4308-813a-9c651e2eb093', 'attached_at': '', 'detached_at': '', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'serial': '1302a61c-1765-4676-9304-76b004523986'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1780.362036] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab16b2e-cf78-474c-899b-a5e77b692838 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.388449] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1847ae2e-c32f-41c0-a79b-e78ef6853a8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.391676] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696249, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.619055} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.391884] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] efe5ffe8-cd29-467d-85ad-d9e7d4eb9203/efe5ffe8-cd29-467d-85ad-d9e7d4eb9203.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1780.392081] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1780.392727] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66d0e094-24c3-4999-a5f3-e69cb260a8c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.415253] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] volume-1302a61c-1765-4676-9304-76b004523986/volume-1302a61c-1765-4676-9304-76b004523986.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1780.419495] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34efa21d-f0ad-4d1e-b7b3-8e724ac6f224 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.433867] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1780.433867] env[62820]: value = "task-1696261" [ 1780.433867] env[62820]: _type = "Task" [ 1780.433867] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.444323] env[62820]: DEBUG oslo_vmware.api [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Task: {'id': task-1696258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211301} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.444323] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1780.444432] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1780.444592] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1780.444770] env[62820]: INFO nova.compute.manager [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Took 1.73 seconds to destroy the instance on the hypervisor. [ 1780.445155] env[62820]: DEBUG oslo.service.loopingcall [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.445481] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1780.445481] env[62820]: value = "task-1696262" [ 1780.445481] env[62820]: _type = "Task" [ 1780.445481] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.448766] env[62820]: DEBUG nova.compute.manager [-] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1780.448858] env[62820]: DEBUG nova.network.neutron [-] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1780.451191] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696261, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.462056] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696262, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.547804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.548343] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1780.551164] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.830s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.551375] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.569254] env[62820]: INFO nova.scheduler.client.report [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted allocations for instance 210277a2-dd10-4e08-8627-4b025a554410 [ 1780.656036] env[62820]: DEBUG oslo_vmware.api [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696254, 'name': PowerOnVM_Task, 'duration_secs': 1.129357} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.656311] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.657033] env[62820]: INFO nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Took 10.25 seconds to spawn the instance on the hypervisor. 
[ 1780.657033] env[62820]: DEBUG nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1780.657442] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d693f48-84fa-486c-b39a-db2f8aadaf99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.660124] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4323e7df-136f-4bbe-8160-fd7b2579727e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.660345] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.660548] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.660735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.660904] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.663387] env[62820]: INFO nova.compute.manager [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Terminating instance [ 1780.696436] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1780.696725] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353657', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'name': 'volume-a9165bc5-6390-450a-8758-45dec9de7f6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3228cd34-2144-425a-aca6-400cb0991e43', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'serial': 'a9165bc5-6390-450a-8758-45dec9de7f6a'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1780.697604] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc928ab1-fd8c-403f-ae54-afff5f21f273 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.718975] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3c803d-f258-4ef7-8a3e-d218103dcf79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.751784] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-a9165bc5-6390-450a-8758-45dec9de7f6a/volume-a9165bc5-6390-450a-8758-45dec9de7f6a.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1780.752703] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eb9bf33-fc63-41a7-a2a4-5496c52b9b6b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.776921] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1780.776921] env[62820]: value = "task-1696263" [ 1780.776921] env[62820]: _type = "Task" [ 1780.776921] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.787651] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696263, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.823900] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696259, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.829976] env[62820]: DEBUG nova.compute.manager [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1780.830148] env[62820]: DEBUG nova.compute.manager [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing instance network info cache due to event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1780.830369] env[62820]: DEBUG oslo_concurrency.lockutils [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.830514] env[62820]: DEBUG oslo_concurrency.lockutils [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.830756] env[62820]: DEBUG nova.network.neutron [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1780.839039] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696260, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.945737] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696261, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117038} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.946058] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1780.946951] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbb74d7-d8d5-4fee-8b25-692ba7e7f9ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.977422] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] efe5ffe8-cd29-467d-85ad-d9e7d4eb9203/efe5ffe8-cd29-467d-85ad-d9e7d4eb9203.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1780.978722] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b9c5f21-61b6-43ba-9925-6da79770ce3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.997610] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696262, 'name': ReconfigVM_Task, 'duration_secs': 0.324174} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.998366] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfigured VM instance instance-0000003b to attach disk [datastore1] volume-1302a61c-1765-4676-9304-76b004523986/volume-1302a61c-1765-4676-9304-76b004523986.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1781.006357] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b84fcbda-e968-4f85-aef8-983e44f3ac0e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.017785] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1781.017785] env[62820]: value = "task-1696264" [ 1781.017785] env[62820]: _type = "Task" [ 1781.017785] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.024266] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1781.024266] env[62820]: value = "task-1696265" [ 1781.024266] env[62820]: _type = "Task" [ 1781.024266] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.030876] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696264, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.037438] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696265, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.055426] env[62820]: DEBUG nova.compute.utils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1781.057499] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1781.057804] env[62820]: DEBUG nova.network.neutron [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1781.077225] env[62820]: DEBUG oslo_concurrency.lockutils [None req-032737f5-5ca0-4b25-96a0-66b1664cdbc1 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "210277a2-dd10-4e08-8627-4b025a554410" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.112s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.103137] env[62820]: DEBUG nova.policy [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe4b58f7f5bd405db5c7f8b630032aa1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'accd5c1cf55248b780b00e33faf79fa0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1781.166766] env[62820]: DEBUG nova.compute.manager [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1781.166996] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1781.168132] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c970b9-ed13-4198-b03d-6ca2a6aa6897 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.181810] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1781.183792] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b45bf120-8461-4d7f-9418-e9bfe7c5f388 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.186413] env[62820]: INFO nova.compute.manager [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Took 25.38 seconds to build instance. [ 1781.192045] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1781.192045] env[62820]: value = "task-1696266" [ 1781.192045] env[62820]: _type = "Task" [ 1781.192045] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.206235] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696266, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.291809] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696263, 'name': ReconfigVM_Task, 'duration_secs': 0.397512} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.292228] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-a9165bc5-6390-450a-8758-45dec9de7f6a/volume-a9165bc5-6390-450a-8758-45dec9de7f6a.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1781.298249] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c0c6ca-771c-4775-88d3-d7565b97b9e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.321033] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1781.321033] env[62820]: value = "task-1696267" [ 1781.321033] env[62820]: _type = "Task" [ 1781.321033] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.334912] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696259, 'name': ReconfigVM_Task, 'duration_secs': 0.655313} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.335964] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd/35b95400-6399-48ae-b7d5-420c33d653dd.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1781.336565] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1781.350154] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696260, 'name': CreateVM_Task, 'duration_secs': 0.638096} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.350154] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696267, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.350154] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1781.350762] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.351031] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.351328] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1781.351637] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07b90770-48d3-4cfb-bb5e-7fdc595fb776 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.358449] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1781.358449] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5200bbc7-400e-a776-0413-6f0c9eb6feb4" [ 1781.358449] env[62820]: _type = "Task" [ 1781.358449] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.376052] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5200bbc7-400e-a776-0413-6f0c9eb6feb4, 'name': SearchDatastore_Task, 'duration_secs': 0.012194} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.376593] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.377383] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1781.377635] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.377790] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.377977] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1781.378529] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba72de92-682d-4a29-9a16-2cc1d0c1fe6b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.394541] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1781.394741] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1781.396215] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67fb7a89-81c9-4b6a-adb6-739e4354a656 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.403806] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1781.403806] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526ee332-a67f-9388-e50a-da905afec0e8" [ 1781.403806] env[62820]: _type = "Task" [ 1781.403806] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.418418] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526ee332-a67f-9388-e50a-da905afec0e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011101} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.419425] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ea00dec-39b1-4fde-a3c2-035d958fa7b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.425584] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1781.425584] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d5bb99-aa21-166b-6868-6b86f95e98d9" [ 1781.425584] env[62820]: _type = "Task" [ 1781.425584] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.438195] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d5bb99-aa21-166b-6868-6b86f95e98d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.537334] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696264, 'name': ReconfigVM_Task, 'duration_secs': 0.352481} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.540464] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Reconfigured VM instance instance-00000063 to attach disk [datastore1] efe5ffe8-cd29-467d-85ad-d9e7d4eb9203/efe5ffe8-cd29-467d-85ad-d9e7d4eb9203.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1781.544686] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696265, 'name': ReconfigVM_Task, 'duration_secs': 0.186162} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.545697] env[62820]: DEBUG nova.network.neutron [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Successfully updated port: 5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1781.547874] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9f4ed54-2ba8-4f47-b41f-d4f197d1dddb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.552908] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353648', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'name': 'volume-1302a61c-1765-4676-9304-76b004523986', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8486f52-998d-4308-813a-9c651e2eb093', 'attached_at': '', 'detached_at': '', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'serial': '1302a61c-1765-4676-9304-76b004523986'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1781.556176] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fcfee81-450e-459e-81d6-98c564ba02ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.562139] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1781.569941] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1781.569941] env[62820]: value = "task-1696268" [ 1781.569941] env[62820]: _type = "Task" [ 1781.569941] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.569941] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1781.569941] env[62820]: value = "task-1696269" [ 1781.569941] env[62820]: _type = "Task" [ 1781.569941] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.584152] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696269, 'name': Rename_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.587295] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696268, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.590161] env[62820]: DEBUG nova.network.neutron [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Successfully created port: f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1781.628871] env[62820]: DEBUG nova.network.neutron [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updated VIF entry in instance network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1781.629157] env[62820]: DEBUG nova.network.neutron [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.689303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f56d7ae7-2088-4c64-afc8-43205e4a3677 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.891s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.701965] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696266, 'name': PowerOffVM_Task, 'duration_secs': 0.212459} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.702288] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1781.702443] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1781.702702] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-269116ec-b3c5-466b-babe-20029562857c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.785397] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1781.785397] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1781.785397] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleting the datastore file [datastore1] 4323e7df-136f-4bbe-8160-fd7b2579727e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1781.785397] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adb9080c-c780-4185-994f-2738094e8fc2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.788360] env[62820]: DEBUG nova.network.neutron [-] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.796918] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for the task: (returnval){ [ 1781.796918] env[62820]: value = "task-1696271" [ 1781.796918] env[62820]: _type = "Task" [ 1781.796918] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.807844] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696271, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.832912] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696267, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.853655] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12f3084-8e66-4405-83b4-f7801059f3f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.880436] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e487372d-27c2-4a00-b8c1-4bae169bd8e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.902906] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1781.937293] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d5bb99-aa21-166b-6868-6b86f95e98d9, 'name': SearchDatastore_Task, 'duration_secs': 0.016144} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.937578] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.937881] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1781.938179] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd2c753e-67ef-470b-9609-06bda39c6a7d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.946714] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1781.946714] env[62820]: value = "task-1696272" [ 1781.946714] env[62820]: _type = "Task" [ 1781.946714] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.959191] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.057242] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.057420] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.057611] env[62820]: DEBUG nova.network.neutron [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1782.086425] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696269, 'name': Rename_Task, 'duration_secs': 0.342564} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.089349] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1782.089900] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696268, 'name': Rename_Task, 'duration_secs': 0.311751} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.090228] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf8d86b2-3da0-4e5f-ad72-bc739c34d77f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.091776] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1782.092214] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21f68735-c848-440e-861c-3fcfbf72c493 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.099521] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1782.099521] env[62820]: value = "task-1696274" [ 1782.099521] env[62820]: _type = "Task" [ 1782.099521] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.104019] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1782.104019] env[62820]: value = "task-1696273" [ 1782.104019] env[62820]: _type = "Task" [ 1782.104019] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.118646] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.122489] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696273, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.131808] env[62820]: DEBUG oslo_concurrency.lockutils [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.132125] env[62820]: DEBUG nova.compute.manager [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1782.132305] env[62820]: DEBUG nova.compute.manager [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing instance network info cache due to event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1782.132541] env[62820]: DEBUG oslo_concurrency.lockutils [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.292438] env[62820]: INFO nova.compute.manager [-] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Took 1.84 seconds to deallocate network for instance. [ 1782.308932] env[62820]: DEBUG oslo_vmware.api [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Task: {'id': task-1696271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167369} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.309253] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1782.310102] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1782.310102] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1782.310102] env[62820]: INFO nova.compute.manager [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Took 1.14 seconds to destroy the instance on the hypervisor. 
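For readers following the repeated "Waiting for the task ... to complete" and "progress is N%" records above: the sketch below illustrates, under assumptions, how a caller drives a vSphere task through oslo.vmware's VMwareAPISession and blocks on it. The host, credentials, datastore paths, and the choice of CopyVirtualDisk_Task are placeholders for illustration, not values taken from this deployment.

```python
# Illustrative sketch only: the oslo.vmware invoke/wait pattern behind the
# "Waiting for the task ... to complete" / "progress is N%" records above.
# Host, credentials and datastore paths are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.org',               # vCenter host (placeholder)
    'administrator@vsphere.local',  # user (placeholder)
    'secret',                       # password (placeholder)
    api_retry_count=10,             # retries for transient SOAP faults
    task_poll_interval=0.5)         # seconds between _poll_task progress checks

# invoke_api() issues the SOAP call and returns a task reference; the
# VirtualDiskManager call mirrors the CopyVirtualDisk_Task invocations above.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/<image>.vmdk',
    destName='[datastore1] <instance>/<instance>.vmdk')

# wait_for_task() polls the task until it succeeds or raises on error,
# emitting the per-poll progress lines seen in the log.
session.wait_for_task(task)
```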
[ 1782.310296] env[62820]: DEBUG oslo.service.loopingcall [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.310370] env[62820]: DEBUG nova.compute.manager [-] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1782.310886] env[62820]: DEBUG nova.network.neutron [-] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1782.334687] env[62820]: DEBUG oslo_vmware.api [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696267, 'name': ReconfigVM_Task, 'duration_secs': 0.526474} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.335129] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353657', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'name': 'volume-a9165bc5-6390-450a-8758-45dec9de7f6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3228cd34-2144-425a-aca6-400cb0991e43', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'serial': 'a9165bc5-6390-450a-8758-45dec9de7f6a'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1782.457169] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696272, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.482810] env[62820]: DEBUG nova.network.neutron [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Port 778fee60-5af2-4328-a536-56882267761d binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1782.575388] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1782.602414] 
env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1782.602414] env[62820]: DEBUG nova.virt.hardware [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1782.603642] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5085955e-f902-444c-8edc-9a22d9da706e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.615502] env[62820]: WARNING nova.network.neutron [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] 26851e2e-dece-4dce-bec8-e64227003b80 already exists in list: networks containing: ['26851e2e-dece-4dce-bec8-e64227003b80']. ignoring it [ 1782.627587] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696273, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.629031] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696274, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.631109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645149d0-4e12-482a-a9a1-907300e18253 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.803509] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.803509] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.803509] env[62820]: DEBUG nova.objects.instance [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lazy-loading 'resources' on Instance uuid 76bd4a09-300d-460e-8442-21b4f6567698 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.886754] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "a495b540-806d-4cd8-b340-86fe937867cd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.886957] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.886957] env[62820]: DEBUG nova.compute.manager [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1782.888470] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6806869f-7ce8-46ca-b355-4c2ecefd2f85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.899752] env[62820]: DEBUG nova.compute.manager [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 1782.900553] env[62820]: DEBUG nova.objects.instance [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'flavor' on Instance uuid a495b540-806d-4cd8-b340-86fe937867cd {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.959171] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518403} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.959467] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1782.959684] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1782.959947] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3afab39-8bb7-45df-816b-41fe4148f0db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.968969] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1782.968969] env[62820]: value = "task-1696275" [ 1782.968969] env[62820]: _type = "Task" [ 1782.968969] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.973530] env[62820]: DEBUG nova.network.neutron [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5120b8e3-6688-4386-9c99-ee01add07316", "address": "fa:16:3e:02:c5:f6", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5120b8e3-66", "ovs_interfaceid": "5120b8e3-6688-4386-9c99-ee01add07316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.986238] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.986238] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d 
tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.987653] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.115757] env[62820]: DEBUG oslo_vmware.api [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696274, 'name': PowerOnVM_Task, 'duration_secs': 0.783716} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.121198] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1783.129541] env[62820]: DEBUG oslo_vmware.api [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696273, 'name': PowerOnVM_Task, 'duration_secs': 0.788221} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.130056] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1783.131502] env[62820]: INFO nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Took 10.31 seconds to spawn the instance on the hypervisor. 
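The "Acquiring lock ...", "Lock ... acquired ... waited 0.000s" and "... 'released' ... held N.NNNs" records throughout this section come from oslo.concurrency's lockutils. A minimal sketch of the two usual forms follows; the lock names and function names are illustrative, not the exact Nova call sites.

```python
# Minimal sketch of the oslo.concurrency locking that produces the
# "Acquiring lock" / "acquired ... waited" / "released ... held" records.
# Lock and function names below are illustrative placeholders.
from oslo_concurrency import lockutils

# Decorator form: calls sharing the same lock name are serialized within the
# process (external=True would additionally take a file-based lock).
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    pass  # resource-tracker style critical section

# Context-manager form, comparable to the per-image locks taken around
# "[datastore1] devstack-image-cache_base/<image-id>" above.
def fetch_image_if_missing(image_id):
    with lockutils.lock('devstack-image-cache_base/%s' % image_id):
        pass  # fetch or copy the cached image exactly once per key
```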
[ 1783.132205] env[62820]: DEBUG nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1783.135191] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e68f563-3ef1-4731-b2ec-fc911c7c6ea5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.140428] env[62820]: DEBUG nova.network.neutron [-] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.143662] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Received event network-vif-plugged-94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1783.143870] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Acquiring lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.144097] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.144782] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.144782] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] No waiting events found dispatching network-vif-plugged-94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1783.144782] env[62820]: WARNING nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Received unexpected event network-vif-plugged-94134340-ccc9-4f22-af2f-2b68424f6ec3 for instance with vm_state building and task_state spawning. 
[ 1783.144782] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Received event network-changed-94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1783.145024] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Refreshing instance network info cache due to event network-changed-94134340-ccc9-4f22-af2f-2b68424f6ec3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1783.145204] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Acquiring lock "refresh_cache-6768101f-8d1d-46be-b0b9-2fdf6cba08da" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.145332] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Acquired lock "refresh_cache-6768101f-8d1d-46be-b0b9-2fdf6cba08da" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.145539] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Refreshing network info cache for port 94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.240045] env[62820]: DEBUG nova.compute.manager [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1783.240045] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8887a8-fe4d-495d-b110-93ce71efae75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.292173] env[62820]: DEBUG nova.compute.manager [req-15601c27-c862-4923-8291-c3cd549e84f0 req-b8a0313f-3620-46d6-93f2-822a73d0f5cb service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Received event network-vif-plugged-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1783.292298] env[62820]: DEBUG oslo_concurrency.lockutils [req-15601c27-c862-4923-8291-c3cd549e84f0 req-b8a0313f-3620-46d6-93f2-822a73d0f5cb service nova] Acquiring lock "392d8bca-2d8d-42c3-ba14-fc1387c75405-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.292650] env[62820]: DEBUG oslo_concurrency.lockutils [req-15601c27-c862-4923-8291-c3cd549e84f0 req-b8a0313f-3620-46d6-93f2-822a73d0f5cb service nova] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1783.292961] env[62820]: DEBUG oslo_concurrency.lockutils [req-15601c27-c862-4923-8291-c3cd549e84f0 req-b8a0313f-3620-46d6-93f2-822a73d0f5cb service nova] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.293264] env[62820]: DEBUG nova.compute.manager [req-15601c27-c862-4923-8291-c3cd549e84f0 req-b8a0313f-3620-46d6-93f2-822a73d0f5cb service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] No waiting events found dispatching network-vif-plugged-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1783.293541] env[62820]: WARNING nova.compute.manager [req-15601c27-c862-4923-8291-c3cd549e84f0 req-b8a0313f-3620-46d6-93f2-822a73d0f5cb service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Received unexpected event network-vif-plugged-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 for instance with vm_state building and task_state spawning. [ 1783.336688] env[62820]: DEBUG nova.network.neutron [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Successfully updated port: f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1783.388802] env[62820]: DEBUG nova.objects.instance [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'flavor' on Instance uuid 3228cd34-2144-425a-aca6-400cb0991e43 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1783.479285] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.479961] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.480144] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.480407] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075695} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.482747] env[62820]: DEBUG oslo_concurrency.lockutils [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.482936] env[62820]: DEBUG nova.network.neutron [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.484520] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e40886-10a8-43eb-82dc-505c3a67145e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.487067] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1783.488735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f92a40-6b50-46e8-bcfe-c6fd96c22f57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.491440] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1783.518266] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1783.518532] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1783.518710] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1783.518922] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1783.519118] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1783.519274] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1783.519481] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1783.519638] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1783.519804] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies 
{{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1783.519996] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1783.520224] env[62820]: DEBUG nova.virt.hardware [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1783.526595] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Reconfiguring VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1783.543022] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39a468e5-c5d2-4745-a005-2138b29d2e90 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.564820] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1783.571146] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.571457] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.571786] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.572359] env[62820]: WARNING oslo_messaging._drivers.amqpdriver [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1783.573292] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a9ef160-0640-4cb2-8c1b-768e59f6b193 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.599538] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1783.599538] env[62820]: value = "task-1696276" [ 1783.599538] env[62820]: _type = "Task" [ 1783.599538] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.601302] env[62820]: DEBUG oslo_vmware.api [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1783.601302] env[62820]: value = "task-1696277" [ 1783.601302] env[62820]: _type = "Task" [ 1783.601302] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.618075] env[62820]: DEBUG oslo_vmware.api [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696277, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.620474] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.647589] env[62820]: INFO nova.compute.manager [-] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Took 1.34 seconds to deallocate network for instance. [ 1783.672035] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdaed26-8e10-40ab-9bb5-4ac1f27d7878 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.676797] env[62820]: INFO nova.compute.manager [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Took 24.85 seconds to build instance. 
[ 1783.680766] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003c7702-c05e-4f0f-be57-1d75114d1b02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.722324] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc6d078-7e6a-488c-887a-e6963f5d04c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.736814] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2a5c67-899d-471b-8e60-9c13f3093d91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.758214] env[62820]: DEBUG nova.compute.provider_tree [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.762134] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3060a2dc-4c37-4b45-b7e0-9835db2a427c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 36.542s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.840167] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.840472] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.840472] env[62820]: DEBUG nova.network.neutron [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1783.894816] env[62820]: DEBUG oslo_concurrency.lockutils [None req-31114632-3c94-4956-beee-5cedd55f2be5 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.318s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.912695] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] 
Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1783.912695] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-573147ea-c800-40fd-b12e-84d431ae72c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.921255] env[62820]: DEBUG oslo_vmware.api [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1783.921255] env[62820]: value = "task-1696278" [ 1783.921255] env[62820]: _type = "Task" [ 1783.921255] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.929338] env[62820]: DEBUG oslo_vmware.api [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.967560] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Updated VIF entry in instance network info cache for port 94134340-ccc9-4f22-af2f-2b68424f6ec3. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.968129] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Updating instance_info_cache with network_info: [{"id": "94134340-ccc9-4f22-af2f-2b68424f6ec3", "address": "fa:16:3e:ef:9c:5b", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94134340-cc", "ovs_interfaceid": "94134340-ccc9-4f22-af2f-2b68424f6ec3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.024828] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.119552] 
env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.123358] env[62820]: DEBUG oslo_vmware.api [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.157521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.178521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16d81ac7-83d0-410a-9472-5de64c2ae29f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.360s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.229183] env[62820]: INFO nova.compute.manager [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Rescuing [ 1784.229493] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.229652] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.229822] env[62820]: DEBUG nova.network.neutron [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.266131] env[62820]: DEBUG nova.scheduler.client.report [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1784.362898] env[62820]: DEBUG nova.network.neutron [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updated VIF entry in instance network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1784.363383] env[62820]: DEBUG nova.network.neutron [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5120b8e3-6688-4386-9c99-ee01add07316", "address": "fa:16:3e:02:c5:f6", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5120b8e3-66", "ovs_interfaceid": "5120b8e3-6688-4386-9c99-ee01add07316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.377407] env[62820]: DEBUG nova.network.neutron [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 
tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1784.431781] env[62820]: DEBUG oslo_vmware.api [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696278, 'name': PowerOffVM_Task, 'duration_secs': 0.26429} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.432087] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1784.432367] env[62820]: DEBUG nova.compute.manager [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1784.433133] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df05f44-3000-4487-b1d9-993c1d2d4189 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.471242] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Releasing lock "refresh_cache-6768101f-8d1d-46be-b0b9-2fdf6cba08da" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.471522] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Received event network-vif-deleted-d46278a6-5202-4c8b-890f-41286051b6d4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1784.471711] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-vif-plugged-5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1784.471900] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.472116] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.472281] env[62820]: DEBUG oslo_concurrency.lockutils 
[req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.472446] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] No waiting events found dispatching network-vif-plugged-5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1784.472613] env[62820]: WARNING nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received unexpected event network-vif-plugged-5120b8e3-6688-4386-9c99-ee01add07316 for instance with vm_state active and task_state None. [ 1784.472777] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-changed-5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1784.472931] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing instance network info cache due to event network-changed-5120b8e3-6688-4386-9c99-ee01add07316. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1784.473108] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.520038] env[62820]: DEBUG nova.network.neutron [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Updating instance_info_cache with network_info: [{"id": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "address": "fa:16:3e:e6:2f:7b", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54d08d7-24", "ovs_interfaceid": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.601601] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.601791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.601969] env[62820]: DEBUG nova.network.neutron [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.615496] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696276, 'name': ReconfigVM_Task, 'duration_secs': 0.605664} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.616113] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1784.617038] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b0a58ce-4d30-445a-8831-ad94a2c1c27d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.621579] env[62820]: DEBUG oslo_vmware.api [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696277, 'name': ReconfigVM_Task, 'duration_secs': 0.710006} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.622314] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.622526] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Reconfigured VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1784.627583] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1784.627583] env[62820]: value = "task-1696279" [ 1784.627583] env[62820]: _type = "Task" [ 1784.627583] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.635825] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696279, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.771495] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.776130] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.751s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.779152] env[62820]: INFO nova.compute.claims [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1784.791933] env[62820]: INFO nova.scheduler.client.report [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Deleted allocations for instance 76bd4a09-300d-460e-8442-21b4f6567698 [ 1784.869031] env[62820]: DEBUG oslo_concurrency.lockutils [req-43b247dd-3ce1-41d2-84df-1222d651d2d8 req-e7d13d4a-784b-4aec-b896-fa6623d1ed86 service nova] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.869031] env[62820]: DEBUG oslo_concurrency.lockutils 
[req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.869031] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing network info cache for port 5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1784.946033] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ba9e3d4-b2ae-4408-95f3-7be135130b96 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.058s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.976790] env[62820]: DEBUG nova.network.neutron [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.023071] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.023422] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Instance network_info: |[{"id": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "address": "fa:16:3e:e6:2f:7b", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", 
"bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54d08d7-24", "ovs_interfaceid": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1785.023844] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:2f:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9a1e09ef-7c9c-45d9-9bf4-55b913524948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f54d08d7-24e7-4c0b-8b56-118bdc4e2e96', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1785.032432] env[62820]: DEBUG oslo.service.loopingcall [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.033321] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1785.033637] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33314827-a02d-49c1-8c75-c861f4f6f68f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.055178] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1785.055178] env[62820]: value = "task-1696280" [ 1785.055178] env[62820]: _type = "Task" [ 1785.055178] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.064191] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696280, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.127149] env[62820]: DEBUG oslo_concurrency.lockutils [None req-604f1398-be8f-4552-b325-eea5a14efa2c tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-9c0d9676-9db9-4be2-a8e6-84bd816234aa-5120b8e3-6688-4386-9c99-ee01add07316" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.750s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.138225] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696279, 'name': Rename_Task, 'duration_secs': 0.151692} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.138413] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1785.138714] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59134094-73f8-4708-8024-ad56f050e85c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.150236] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1785.150236] env[62820]: value = "task-1696281" [ 1785.150236] env[62820]: _type = "Task" [ 1785.150236] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.161681] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696281, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.294134] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "f78bf828-b9ab-480e-bd58-3dd8587780ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.294884] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.300278] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f190dc36-e778-489c-87fc-d1396ce7de3d tempest-ServersV294TestFqdnHostnames-200494873 tempest-ServersV294TestFqdnHostnames-200494873-project-member] Lock "76bd4a09-300d-460e-8442-21b4f6567698" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.096s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.324850] env[62820]: DEBUG nova.network.neutron [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance_info_cache with network_info: [{"id": "778fee60-5af2-4328-a536-56882267761d", "address": "fa:16:3e:92:ed:c9", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap778fee60-5a", "ovs_interfaceid": "778fee60-5af2-4328-a536-56882267761d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.363870] env[62820]: DEBUG nova.compute.manager [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Received event network-changed-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1785.364143] env[62820]: DEBUG nova.compute.manager [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc 
service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Refreshing instance network info cache due to event network-changed-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1785.364322] env[62820]: DEBUG oslo_concurrency.lockutils [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] Acquiring lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.364470] env[62820]: DEBUG oslo_concurrency.lockutils [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] Acquired lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.364633] env[62820]: DEBUG nova.network.neutron [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Refreshing network info cache for port f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1785.480014] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.567990] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696280, 'name': CreateVM_Task, 'duration_secs': 0.385794} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.568183] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1785.568908] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.569070] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.569398] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1785.570056] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-371ffd3b-3ac8-4334-8a61-c90852df66bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.575221] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1785.575221] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520815a3-4a39-481b-590d-d054beea9ab2" [ 1785.575221] env[62820]: _type = "Task" [ 1785.575221] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.584451] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520815a3-4a39-481b-590d-d054beea9ab2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.587911] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updated VIF entry in instance network info cache for port 5120b8e3-6688-4386-9c99-ee01add07316. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1785.588368] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5120b8e3-6688-4386-9c99-ee01add07316", "address": "fa:16:3e:02:c5:f6", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5120b8e3-66", "ovs_interfaceid": "5120b8e3-6688-4386-9c99-ee01add07316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.661092] env[62820]: DEBUG oslo_vmware.api [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696281, 'name': PowerOnVM_Task, 'duration_secs': 0.509176} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.661328] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1785.661581] env[62820]: INFO nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Took 7.93 seconds to spawn the instance on the hypervisor. [ 1785.661719] env[62820]: DEBUG nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1785.662570] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353f1edb-54c3-4fe9-ac2e-ed6ec003ab8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.797486] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1785.827812] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.080550] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428bdb95-5422-4d77-b690-a01ab24d7629 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.092111] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7031ed9b-c30f-4a1b-b45b-3e4dc91d3fae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.095483] env[62820]: DEBUG oslo_concurrency.lockutils [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.095746] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Received event network-vif-deleted-8cea8850-c5a0-4831-99cc-8920c44710b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1786.095928] env[62820]: INFO nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Neutron deleted 
interface 8cea8850-c5a0-4831-99cc-8920c44710b7; detaching it from the instance and deleting it from the info cache [ 1786.096126] env[62820]: DEBUG nova.network.neutron [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.097263] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520815a3-4a39-481b-590d-d054beea9ab2, 'name': SearchDatastore_Task, 'duration_secs': 0.013925} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.100381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.100606] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1786.100953] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.101136] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.101324] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1786.102208] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a356fb1b-52fa-4f4c-a93c-1eea89e9109a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.134254] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b353e6ee-9660-4f37-a588-d3a2cc526732 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.136905] 
env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1786.137104] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1786.138041] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdb8bdf5-2535-401b-b0a4-dc65999386ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.147889] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1786.147889] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d1fd51-0e54-4d00-17d6-f71aee1f5d33" [ 1786.147889] env[62820]: _type = "Task" [ 1786.147889] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.149162] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee33c929-e67c-4b88-afe3-74fcbd336605 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.170602] env[62820]: DEBUG nova.compute.provider_tree [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1786.179292] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d1fd51-0e54-4d00-17d6-f71aee1f5d33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.181910] env[62820]: INFO nova.compute.manager [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Took 22.54 seconds to build instance. 
[ 1786.194798] env[62820]: INFO nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Rebuilding instance [ 1786.252770] env[62820]: DEBUG nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1786.253668] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ea9bb1-c2a2-4d38-918f-439414d3e101 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.311185] env[62820]: DEBUG nova.network.neutron [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Updated VIF entry in instance network info cache for port f54d08d7-24e7-4c0b-8b56-118bdc4e2e96. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1786.311547] env[62820]: DEBUG nova.network.neutron [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Updating instance_info_cache with network_info: [{"id": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "address": "fa:16:3e:e6:2f:7b", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54d08d7-24", "ovs_interfaceid": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.322962] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.349432] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5978b381-6bd1-4d0c-b1d0-b9f3f5ec496f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.374261] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-9c0d9676-9db9-4be2-a8e6-84bd816234aa-5120b8e3-6688-4386-9c99-ee01add07316" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.374545] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-9c0d9676-9db9-4be2-a8e6-84bd816234aa-5120b8e3-6688-4386-9c99-ee01add07316" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.376694] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3962e4-5a40-4b1d-85c4-04ca9c5e9b44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.387208] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1786.525574] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.525973] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcaef3db-879a-443c-8be6-09c482e988a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.537712] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1786.537712] env[62820]: value = "task-1696282" [ 1786.537712] env[62820]: _type = "Task" [ 1786.537712] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.547417] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696282, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.600882] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cf99b65-a247-40af-b8fa-f74702fdf37c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.611710] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e940c047-4028-4870-b34b-86957b067af9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.648797] env[62820]: DEBUG nova.compute.manager [req-867c1d24-a577-4898-b72c-bff4cb6351a2 req-8347a5fa-dbd8-44ea-8d69-0b87204d7a64 service nova] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Detach interface failed, port_id=8cea8850-c5a0-4831-99cc-8920c44710b7, reason: Instance 4323e7df-136f-4bbe-8160-fd7b2579727e could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1786.663467] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d1fd51-0e54-4d00-17d6-f71aee1f5d33, 'name': SearchDatastore_Task, 'duration_secs': 0.028437} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.664282] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e36f62a-ec1e-4c50-a4d6-fa532957a798 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.675307] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1786.675307] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52479da4-04f7-11b2-2d26-3a957e3725d7" [ 1786.675307] env[62820]: _type = "Task" [ 1786.675307] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.681158] env[62820]: DEBUG nova.scheduler.client.report [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1786.687845] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7d3071ce-a1a8-47bd-9454-42b96a723129 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.314s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.690113] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52479da4-04f7-11b2-2d26-3a957e3725d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.815995] env[62820]: DEBUG oslo_concurrency.lockutils [req-ba504918-3f71-4d5e-8d18-3d6d3aab7f5f req-0d877037-186a-4be6-b49b-0f009a6587bc service nova] Releasing lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.877903] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.878254] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.879724] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1b33e5-27ef-42ce-b547-7891d58f23ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.905776] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1786.907053] env[62820]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de789ee8-b4c2-4be3-94e5-df0372eab464 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.910018] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ec0233-b357-4cc0-95cc-77961ab88114 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.949809] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Reconfiguring VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1786.951832] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5864fbda-3577-4121-a8f7-666ba5af4654 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.972950] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1786.972950] env[62820]: value = "task-1696283" [ 1786.972950] env[62820]: _type = "Task" [ 1786.972950] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.980452] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1786.980452] env[62820]: value = "task-1696284" [ 1786.980452] env[62820]: _type = "Task" [ 1786.980452] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.990098] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696283, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.997891] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.048726] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696282, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.133094] env[62820]: INFO nova.compute.manager [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Rebuilding instance [ 1787.188428] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52479da4-04f7-11b2-2d26-3a957e3725d7, 'name': SearchDatastore_Task, 'duration_secs': 0.026184} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.191370] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.191786] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 392d8bca-2d8d-42c3-ba14-fc1387c75405/392d8bca-2d8d-42c3-ba14-fc1387c75405.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1787.192627] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.193221] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1787.199134] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce373c37-f102-4f89-aae2-3eb045ac4102 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.200242] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.043s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.200467] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.202749] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.880s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.204143] env[62820]: INFO nova.compute.claims [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1787.207413] env[62820]: DEBUG nova.compute.manager [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1787.209641] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa5e504-c841-4180-9c25-8b51a5977c48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.221832] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1787.221832] env[62820]: value = "task-1696285" [ 1787.221832] env[62820]: _type = "Task" [ 1787.221832] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.238896] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696285, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.242833] env[62820]: INFO nova.scheduler.client.report [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Deleted allocations for instance 4323e7df-136f-4bbe-8160-fd7b2579727e [ 1787.268161] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.269066] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97a5fab7-81d6-49bf-ae49-c7a9e0b39b2a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.277338] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1787.277338] env[62820]: value = "task-1696286" [ 1787.277338] env[62820]: _type = "Task" [ 1787.277338] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.286395] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.488468] env[62820]: DEBUG oslo_vmware.api [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696283, 'name': PowerOnVM_Task, 'duration_secs': 0.562701} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.490920] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1787.490920] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6a95989a-824f-47da-8d86-e0fba7d471f3 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance '35b95400-6399-48ae-b7d5-420c33d653dd' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1787.498308] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.550717] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696282, 'name': PowerOffVM_Task, 'duration_secs': 0.519544} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.551025] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1787.551908] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f330f5ee-c388-48b4-b2b7-e846c06a38b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.593569] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae0b4fe-4ddb-497a-9af5-1d60a1120a69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.642574] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.643070] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e9cb6f8-e4bd-41d9-9601-126d6d24a47d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.652987] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1787.652987] env[62820]: value = "task-1696287" [ 1787.652987] env[62820]: _type = "Task" [ 1787.652987] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.665871] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1787.666178] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.666447] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.666599] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.666806] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.667185] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a2b02de-b0e3-46b8-98f8-73104ed76f31 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.694433] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.695061] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.697382] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76e666ab-8267-47ea-8d75-c1d029852892 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.705480] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1787.705480] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52167f0e-130e-1972-2bab-08c02eb98dbb" [ 1787.705480] env[62820]: _type = "Task" [ 1787.705480] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.711663] env[62820]: DEBUG nova.compute.utils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1787.713659] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1787.713909] env[62820]: DEBUG nova.network.neutron [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1787.729307] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52167f0e-130e-1972-2bab-08c02eb98dbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.743796] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696285, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.754453] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5ad390f1-0545-4269-8152-0ce45a48967a tempest-DeleteServersTestJSON-728742032 tempest-DeleteServersTestJSON-728742032-project-member] Lock "4323e7df-136f-4bbe-8160-fd7b2579727e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.094s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.796065] env[62820]: DEBUG nova.policy [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd41e844bb294c6ab6e3869af994f60a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fef128f5c704730b335b62f6cce0416', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1787.798300] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1787.798677] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1787.800316] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b4a955-6310-4c4b-bfaa-0c7afb57613b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.813461] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1787.813859] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-466852b5-c2b7-42b6-8cb4-577959f9eba6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.901853] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1787.902233] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Deleting 
contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1787.902480] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] a495b540-806d-4cd8-b340-86fe937867cd {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1787.902777] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-468fe8b5-e7f6-4d97-9e03-9157ae62e4fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.912423] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1787.912423] env[62820]: value = "task-1696289" [ 1787.912423] env[62820]: _type = "Task" [ 1787.912423] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.922380] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.994616] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.220933] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1788.226472] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52167f0e-130e-1972-2bab-08c02eb98dbb, 'name': SearchDatastore_Task, 'duration_secs': 0.063674} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.228647] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be0f7694-a559-46d3-adce-c1a43d4c5d4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.234542] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.239480] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb7c4e78-edf7-4030-869c-392e600c1e89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.247761] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1788.247761] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5296d9cb-16c3-904c-5f23-2e45a30684f3" [ 1788.247761] env[62820]: _type = "Task" [ 1788.247761] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.267380] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1788.267380] env[62820]: value = "task-1696291" [ 1788.267380] env[62820]: _type = "Task" [ 1788.267380] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.267632] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597027} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.270819] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 392d8bca-2d8d-42c3-ba14-fc1387c75405/392d8bca-2d8d-42c3-ba14-fc1387c75405.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1788.271069] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1788.274552] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef328b17-5e74-4662-8ad6-061884667b26 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.281422] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5296d9cb-16c3-904c-5f23-2e45a30684f3, 'name': SearchDatastore_Task, 'duration_secs': 0.019632} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.282691] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.283066] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. {{(pid=62820) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1788.283569] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a615cb53-858c-427c-bd51-f71800f19d69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.289124] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696291, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.293366] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1788.293366] env[62820]: value = "task-1696292" [ 1788.293366] env[62820]: _type = "Task" [ 1788.293366] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.297504] env[62820]: DEBUG nova.network.neutron [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Successfully created port: f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1788.301813] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1788.301813] env[62820]: value = "task-1696293" [ 1788.301813] env[62820]: _type = "Task" [ 1788.301813] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.308677] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696292, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.317455] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.423108] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30338} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.425649] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1788.425858] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1788.426048] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1788.496588] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.555856] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f5f92c-9b6a-423b-8aed-ea4706cd8e43 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.568203] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666f8f74-710b-4867-87d7-0ffd072c5831 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.613909] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90ecf6b-6317-413d-aa57-a22bf906767d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.623773] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef653a3-8c4b-4480-8a64-5c98f76683e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.642015] env[62820]: DEBUG nova.compute.provider_tree [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1788.782677] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696291, 'name': PowerOffVM_Task, 'duration_secs': 0.397145} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.783058] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1788.783316] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1788.784132] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7534d304-b75f-4eba-8e33-78c2159adb3c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.792302] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1788.792549] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eeaf7f41-2d55-4a52-b07c-03d134d58189 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.804188] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696292, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076522} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.807836] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1788.808632] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697a0e07-3f21-41b3-95a3-7b25cb75a143 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.820339] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696293, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.837419] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 392d8bca-2d8d-42c3-ba14-fc1387c75405/392d8bca-2d8d-42c3-ba14-fc1387c75405.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1788.837747] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4739060-2b1a-4944-9b3d-fd1c8b8bde92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.865567] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1788.865567] env[62820]: value = "task-1696295" [ 1788.865567] env[62820]: _type = "Task" [ 1788.865567] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.880692] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696295, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.940320] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1788.940671] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1788.940780] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleting the datastore file [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1788.941214] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08323a3e-69fb-4420-b214-acae4619eac8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.950994] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1788.950994] env[62820]: value = "task-1696296" [ 1788.950994] env[62820]: _type = "Task" [ 1788.950994] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.962854] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696296, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.996338] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.146468] env[62820]: DEBUG nova.scheduler.client.report [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1789.233429] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1789.270042] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1789.270042] env[62820]: DEBUG 
nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1789.270505] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1789.270688] env[62820]: DEBUG nova.virt.hardware [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1789.271552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb2c417-1bf4-4e11-951e-93c9d4e3d5bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.280302] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfc80f5-2cd7-488c-83a8-26c24524cb5c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.313971] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696293, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.850076} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.314265] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
[ 1789.315079] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d92bf8b-65f5-435c-b8a9-002c51527dc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.344844] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.345251] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aee96a6e-44c8-4be9-ad78-1d0a277d1a70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.367145] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1789.367145] env[62820]: value = "task-1696297" [ 1789.367145] env[62820]: _type = "Task" [ 1789.367145] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.379796] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696295, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.383030] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696297, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.459660] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1789.459660] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1789.459660] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1789.459844] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1789.459961] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1789.460144] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1789.460351] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1789.460511] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1789.460678] 
env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1789.460847] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1789.461036] env[62820]: DEBUG nova.virt.hardware [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1789.461851] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ca9836-779c-4276-8223-2f9891495ae7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.467508] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381785} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.468214] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.468401] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1789.468584] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1789.481996] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21ea720-6353-4035-bba9-6103dd3633e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.494359] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.502488] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:1a:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18cc900d-6813-4f95-b166-a6b1a486f112', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1789.510420] env[62820]: DEBUG oslo.service.loopingcall [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.511279] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1789.511531] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be29cb6b-fc40-4b35-8145-031bd755ffe3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.535047] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1789.535047] env[62820]: value = "task-1696298" [ 1789.535047] env[62820]: _type = "Task" [ 1789.535047] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.544671] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696298, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.656019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.656628] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1789.891548] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696295, 'name': ReconfigVM_Task, 'duration_secs': 0.758694} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.896167] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 392d8bca-2d8d-42c3-ba14-fc1387c75405/392d8bca-2d8d-42c3-ba14-fc1387c75405.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1789.900319] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696297, 'name': ReconfigVM_Task, 'duration_secs': 0.457321} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.900319] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-311cd66f-9ad9-4bef-b6ff-1f1d5ae42561 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.901733] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1789.904282] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53266ddb-f231-442d-9b45-7212665df2e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.940649] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-733c8eb8-7fee-4504-bdc6-23fa44790da3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.956111] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1789.956111] env[62820]: value = "task-1696299" [ 1789.956111] env[62820]: _type = "Task" [ 1789.956111] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.962356] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1789.962356] env[62820]: value = "task-1696300" [ 1789.962356] env[62820]: _type = "Task" [ 1789.962356] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.967626] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696299, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.981205] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.997465] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.050316] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696298, 'name': CreateVM_Task, 'duration_secs': 0.472472} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.050534] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1790.051352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.051529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.051905] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1790.052549] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-144be1d4-66a5-4503-904c-b849466aa6fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.059881] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1790.059881] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5239d401-6edc-1268-bf82-449f324cbe8b" [ 
1790.059881] env[62820]: _type = "Task" [ 1790.059881] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.062784] env[62820]: DEBUG nova.compute.manager [req-c896cf7f-7462-4e76-b42e-9039fc4d3115 req-f22d2f0d-3b43-45cd-bf97-52065271282c service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Received event network-vif-plugged-f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1790.062784] env[62820]: DEBUG oslo_concurrency.lockutils [req-c896cf7f-7462-4e76-b42e-9039fc4d3115 req-f22d2f0d-3b43-45cd-bf97-52065271282c service nova] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.062784] env[62820]: DEBUG oslo_concurrency.lockutils [req-c896cf7f-7462-4e76-b42e-9039fc4d3115 req-f22d2f0d-3b43-45cd-bf97-52065271282c service nova] Lock "a8803178-7fa3-42ea-824c-901063673062-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.062884] env[62820]: DEBUG oslo_concurrency.lockutils [req-c896cf7f-7462-4e76-b42e-9039fc4d3115 req-f22d2f0d-3b43-45cd-bf97-52065271282c service nova] Lock "a8803178-7fa3-42ea-824c-901063673062-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.063209] env[62820]: DEBUG nova.compute.manager [req-c896cf7f-7462-4e76-b42e-9039fc4d3115 req-f22d2f0d-3b43-45cd-bf97-52065271282c service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] No waiting events found dispatching network-vif-plugged-f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1790.063284] env[62820]: WARNING nova.compute.manager [req-c896cf7f-7462-4e76-b42e-9039fc4d3115 req-f22d2f0d-3b43-45cd-bf97-52065271282c service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Received unexpected event network-vif-plugged-f7027439-2429-4746-8bc9-a95ce975c96a for instance with vm_state building and task_state spawning. [ 1790.073590] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239d401-6edc-1268-bf82-449f324cbe8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.122504] env[62820]: DEBUG nova.network.neutron [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Successfully updated port: f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1790.167973] env[62820]: DEBUG nova.compute.utils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1790.169571] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1790.169758] env[62820]: DEBUG nova.network.neutron [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1790.212626] env[62820]: DEBUG nova.policy [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1790.373025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "35b95400-6399-48ae-b7d5-420c33d653dd" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.373025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.373025] env[62820]: DEBUG nova.compute.manager [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Going to confirm migration 6 {{(pid=62820) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5127}} [ 1790.467401] env[62820]: DEBUG oslo_vmware.api [None 
req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696299, 'name': Rename_Task, 'duration_secs': 0.428087} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.470595] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1790.470870] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cc4a43d-cc7f-4222-a7e7-f20d5d2288af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.484023] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696300, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.485482] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1790.485482] env[62820]: value = "task-1696301" [ 1790.485482] env[62820]: _type = "Task" [ 1790.485482] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.497810] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.501573] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696301, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.527991] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.529574] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.529829] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.530036] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.530508] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.530696] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.530945] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.531127] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.531322] env[62820]: DEBUG nova.virt.hardware [None 
req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.531493] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.531928] env[62820]: DEBUG nova.virt.hardware [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.533475] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57c91c6-a762-45fe-8423-c6cdcb95733c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.537809] env[62820]: DEBUG nova.network.neutron [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Successfully created port: 9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1790.545750] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec319df8-6dd9-485f-9bd3-80a55815faba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.562767] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:9c:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94134340-ccc9-4f22-af2f-2b68424f6ec3', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1790.570400] env[62820]: DEBUG oslo.service.loopingcall [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.576024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1790.576024] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a97740ca-eb49-49c8-99a1-75c6fbbd1fd8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.598024] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239d401-6edc-1268-bf82-449f324cbe8b, 'name': SearchDatastore_Task, 'duration_secs': 0.044544} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.599837] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.600137] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1790.600414] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.600593] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.600846] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1790.601764] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1790.601764] env[62820]: value = "task-1696302" [ 1790.601764] env[62820]: _type = "Task" [ 1790.601764] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.601983] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cab6040-5a2a-4cf1-ad41-04a99c9cd686 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.617068] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696302, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.619137] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1790.619422] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1790.620819] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1027061b-8842-42a3-bd35-f8fd5c326d42 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.625715] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.625868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.626018] env[62820]: DEBUG nova.network.neutron [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1790.629060] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1790.629060] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5210ba5e-f5ff-d718-d90a-7b66e34bb434" [ 1790.629060] env[62820]: _type = "Task" [ 1790.629060] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.639929] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5210ba5e-f5ff-d718-d90a-7b66e34bb434, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.672755] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1790.923503] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.923699] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.923877] env[62820]: DEBUG nova.network.neutron [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1790.924170] env[62820]: DEBUG nova.objects.instance [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'info_cache' on Instance uuid 35b95400-6399-48ae-b7d5-420c33d653dd {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1790.981337] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696300, 'name': ReconfigVM_Task, 'duration_secs': 0.580936} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.981337] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1790.981568] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80e5ebbf-b1f6-484e-a326-536955e12686 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.993901] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1790.993901] env[62820]: value = "task-1696303" [ 1790.993901] env[62820]: _type = "Task" [ 1790.993901] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.005174] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696301, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.016144] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.016783] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.114731] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696302, 'name': CreateVM_Task, 'duration_secs': 0.450518} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.114986] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1791.116155] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.116155] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.116256] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1791.116550] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b1dc1f9-5e56-4e4d-ba33-4eba2be60088 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.123542] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1791.123542] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527a5d54-b243-b76a-872c-4afb0ccb7acd" [ 1791.123542] env[62820]: _type = "Task" [ 1791.123542] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.133782] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527a5d54-b243-b76a-872c-4afb0ccb7acd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.147020] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5210ba5e-f5ff-d718-d90a-7b66e34bb434, 'name': SearchDatastore_Task, 'duration_secs': 0.023436} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.148214] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b21218a-cdea-45c2-a13a-203da4e6fb87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.155124] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1791.155124] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b41bd7-b587-8b9d-1e8f-04a43eb06571" [ 1791.155124] env[62820]: _type = "Task" [ 1791.155124] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.164164] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b41bd7-b587-8b9d-1e8f-04a43eb06571, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.205431] env[62820]: DEBUG nova.network.neutron [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1791.442389] env[62820]: DEBUG nova.network.neutron [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.510485] env[62820]: DEBUG oslo_vmware.api [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696301, 'name': PowerOnVM_Task, 
'duration_secs': 0.626875} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.511107] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.511784] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1791.512012] env[62820]: INFO nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Took 8.94 seconds to spawn the instance on the hypervisor. [ 1791.512214] env[62820]: DEBUG nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1791.512997] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cca3f3-6b10-4abb-beb5-8861bd207578 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.523489] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696303, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.642209] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527a5d54-b243-b76a-872c-4afb0ccb7acd, 'name': SearchDatastore_Task, 'duration_secs': 0.013935} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.642569] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.642824] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1791.643099] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.666584] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b41bd7-b587-8b9d-1e8f-04a43eb06571, 'name': SearchDatastore_Task, 'duration_secs': 0.016412} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.666962] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.667425] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1791.667561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.667749] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1791.667970] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0dd8eac1-2eec-48c4-a45e-04628d269671 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.671155] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf12a513-dda6-452f-8f5b-db44f90f3f75 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.685889] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1791.689384] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1791.689384] env[62820]: value = "task-1696304" [ 1791.689384] env[62820]: _type = "Task" [ 1791.689384] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.689384] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1791.689384] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1791.691781] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbedbbce-0ec9-432d-83ac-75bd0d6e54ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.705464] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1791.705464] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e19180-3880-d827-8727-39529a928ff8" [ 1791.705464] env[62820]: _type = "Task" [ 1791.705464] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.705707] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696304, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.715388] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e19180-3880-d827-8727-39529a928ff8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.725844] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.726245] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.726556] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.726878] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.728031] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.728031] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.728031] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.728031] 
env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.728031] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.728366] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.728410] env[62820]: DEBUG nova.virt.hardware [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.729876] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b43914-865a-42c7-a441-eb46ea23c1fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.738234] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ff01d9-26f6-43ad-b559-4b22cc78fc74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.950672] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.951116] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Instance network_info: |[{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1791.952740] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:90:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7027439-2429-4746-8bc9-a95ce975c96a', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.965275] env[62820]: DEBUG oslo.service.loopingcall [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.965677] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8803178-7fa3-42ea-824c-901063673062] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1791.965913] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a8286a4-ec13-4aa2-a2c0-0eb0827f21f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.990781] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.990781] env[62820]: value = "task-1696305" [ 1791.990781] env[62820]: _type = "Task" [ 1791.990781] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.010509] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696305, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.017940] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.022552] env[62820]: DEBUG oslo_vmware.api [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696303, 'name': PowerOnVM_Task, 'duration_secs': 0.939293} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.022905] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1792.026384] env[62820]: DEBUG nova.compute.manager [None req-4ac6a600-e681-4e12-8cee-be3ef76de312 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1792.030305] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bec350-b7b1-45d6-aa34-c6b0b4d0700b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.047911] env[62820]: INFO nova.compute.manager [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Took 21.73 seconds to build instance. [ 1792.202021] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696304, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.224918] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e19180-3880-d827-8727-39529a928ff8, 'name': SearchDatastore_Task, 'duration_secs': 0.017439} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.225816] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfce7870-54f8-42db-ad98-2b572232ef20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.235472] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1792.235472] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5201d91c-d35f-4de8-fff4-fa10369fe6af" [ 1792.235472] env[62820]: _type = "Task" [ 1792.235472] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.248280] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5201d91c-d35f-4de8-fff4-fa10369fe6af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.316355] env[62820]: DEBUG nova.compute.manager [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Received event network-changed-f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1792.317360] env[62820]: DEBUG nova.compute.manager [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Refreshing instance network info cache due to event network-changed-f7027439-2429-4746-8bc9-a95ce975c96a. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1792.317360] env[62820]: DEBUG oslo_concurrency.lockutils [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.317360] env[62820]: DEBUG oslo_concurrency.lockutils [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.317360] env[62820]: DEBUG nova.network.neutron [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Refreshing network info cache for port f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1792.445208] env[62820]: DEBUG nova.network.neutron [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance_info_cache with network_info: [{"id": "778fee60-5af2-4328-a536-56882267761d", "address": "fa:16:3e:92:ed:c9", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap778fee60-5a", "ovs_interfaceid": "778fee60-5af2-4328-a536-56882267761d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.502257] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696305, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.513468] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.552112] env[62820]: DEBUG oslo_concurrency.lockutils [None req-23e9562a-0d86-4f51-86b7-dac43a1cf718 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.241s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.703477] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696304, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610012} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.703973] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1792.704370] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1792.704755] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78e901a1-8a7b-4684-afd8-28f04ac26a2a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.718751] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1792.718751] env[62820]: value = "task-1696306" [ 1792.718751] env[62820]: _type = "Task" [ 1792.718751] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.728069] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696306, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.749023] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5201d91c-d35f-4de8-fff4-fa10369fe6af, 'name': SearchDatastore_Task, 'duration_secs': 0.029106} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.749023] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.749023] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1792.749023] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a974d3d1-51df-4c3f-8ca5-b3d1c505ca8d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.759809] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1792.759809] env[62820]: value = "task-1696307" [ 1792.759809] env[62820]: _type = "Task" [ 1792.759809] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.772436] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696307, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.774866] env[62820]: DEBUG nova.network.neutron [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Successfully updated port: 9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1792.947741] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-35b95400-6399-48ae-b7d5-420c33d653dd" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.948367] env[62820]: DEBUG nova.objects.instance [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'migration_context' on Instance uuid 35b95400-6399-48ae-b7d5-420c33d653dd {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1793.005583] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696305, 'name': CreateVM_Task, 'duration_secs': 0.886668} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.006490] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8803178-7fa3-42ea-824c-901063673062] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1793.007150] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.007315] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.007702] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1793.008420] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9886671b-200b-4e81-a89d-cd86081a06f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.014651] env[62820]: DEBUG oslo_vmware.api [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696284, 'name': ReconfigVM_Task, 'duration_secs': 5.852947} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.015938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.016176] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Reconfigured VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1793.020788] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1793.020788] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e8a07f-850a-aec3-668e-9f02c872b039" [ 1793.020788] env[62820]: _type = "Task" [ 1793.020788] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.029864] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e8a07f-850a-aec3-668e-9f02c872b039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.158229] env[62820]: DEBUG nova.compute.manager [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Received event network-changed-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1793.158229] env[62820]: DEBUG nova.compute.manager [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Refreshing instance network info cache due to event network-changed-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1793.158229] env[62820]: DEBUG oslo_concurrency.lockutils [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] Acquiring lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.158229] env[62820]: DEBUG oslo_concurrency.lockutils [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] Acquired lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.158229] env[62820]: DEBUG nova.network.neutron [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Refreshing network info cache for port f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.234179] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696306, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096399} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.234350] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1793.236331] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196e1028-e4cd-42f0-a56e-79ae91b80c65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.262815] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1793.264026] env[62820]: DEBUG nova.network.neutron [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Updated VIF entry in instance network info cache for port f7027439-2429-4746-8bc9-a95ce975c96a. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1793.264466] env[62820]: DEBUG nova.network.neutron [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.266167] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82e020c3-bd26-4010-80a6-eb28cc4603b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.291741] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-f78bf828-b9ab-480e-bd58-3dd8587780ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.291741] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-f78bf828-b9ab-480e-bd58-3dd8587780ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.291741] env[62820]: DEBUG nova.network.neutron [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1793.300258] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696307, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.301942] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1793.301942] env[62820]: value = "task-1696308" [ 1793.301942] env[62820]: _type = "Task" [ 1793.301942] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.312467] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696308, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.454188] env[62820]: DEBUG nova.objects.base [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Object Instance<35b95400-6399-48ae-b7d5-420c33d653dd> lazy-loaded attributes: info_cache,migration_context {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1793.455530] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5de9c6-1421-4193-81c2-10fe667ea036 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.483109] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e185d140-f61b-4850-9e0c-aad7882cf54e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.490969] env[62820]: DEBUG oslo_vmware.api [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1793.490969] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526e57e4-230c-99ed-84fc-ef9706298c12" [ 1793.490969] env[62820]: _type = "Task" [ 1793.490969] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.503539] env[62820]: DEBUG oslo_vmware.api [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526e57e4-230c-99ed-84fc-ef9706298c12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.530036] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e8a07f-850a-aec3-668e-9f02c872b039, 'name': SearchDatastore_Task, 'duration_secs': 0.029279} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.530036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.530255] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1793.530497] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.530646] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.530883] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1793.531154] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1d7cdff-4011-44cd-bdba-d1cc27de2d69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.541842] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1793.542117] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1793.544514] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe63101d-fa26-4a2c-89de-239f7db84a5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.553887] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.554261] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.556395] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1793.556395] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52652b9f-0a69-272e-a5d4-2ee9a18c61fa" [ 1793.556395] env[62820]: _type = "Task" [ 1793.556395] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.566038] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52652b9f-0a69-272e-a5d4-2ee9a18c61fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.649955] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.650213] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.684680] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.684912] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.716327] env[62820]: INFO nova.compute.manager [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Unrescuing [ 1793.716597] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.717678] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.717678] env[62820]: DEBUG nova.network.neutron [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1793.776407] env[62820]: DEBUG oslo_vmware.api [None 
req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696307, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787309} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.776720] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1793.776955] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1793.777238] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7361f23f-f456-4166-8547-10583ea2ccc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.785140] env[62820]: DEBUG oslo_concurrency.lockutils [req-fbb62549-694e-4ec2-9f9c-f3f14f6bc516 req-70886773-1dce-470c-bebe-0c2de21bde81 service nova] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.785591] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1793.785591] env[62820]: value = "task-1696309" [ 1793.785591] env[62820]: _type = "Task" [ 1793.785591] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.797575] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696309, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.812998] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696308, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.833254] env[62820]: DEBUG nova.network.neutron [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1794.008851] env[62820]: DEBUG oslo_vmware.api [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526e57e4-230c-99ed-84fc-ef9706298c12, 'name': SearchDatastore_Task, 'duration_secs': 0.019941} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.010170] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.010440] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.029176] env[62820]: DEBUG nova.network.neutron [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Updated VIF entry in instance network info cache for port f54d08d7-24e7-4c0b-8b56-118bdc4e2e96. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1794.029546] env[62820]: DEBUG nova.network.neutron [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Updating instance_info_cache with network_info: [{"id": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "address": "fa:16:3e:e6:2f:7b", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54d08d7-24", "ovs_interfaceid": "f54d08d7-24e7-4c0b-8b56-118bdc4e2e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.056817] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d 
tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1794.072901] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52652b9f-0a69-272e-a5d4-2ee9a18c61fa, 'name': SearchDatastore_Task, 'duration_secs': 0.025687} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.077403] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d85f2e1c-78b8-4f5f-8b66-8dc467972473 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.087402] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1794.087402] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526eb2a2-88ae-3cb0-fd78-7df6b144b82d" [ 1794.087402] env[62820]: _type = "Task" [ 1794.087402] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.095346] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526eb2a2-88ae-3cb0-fd78-7df6b144b82d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.140397] env[62820]: DEBUG nova.network.neutron [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Updating instance_info_cache with network_info: [{"id": "9b1b9e77-da64-4b99-b993-0175cd83f6b7", "address": "fa:16:3e:c7:14:2b", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1b9e77-da", "ovs_interfaceid": "9b1b9e77-da64-4b99-b993-0175cd83f6b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.153577] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1794.187134] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1794.297511] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696309, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108746} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.297936] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.298701] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2537030b-df69-40dc-82c7-89bbe4124b71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.327704] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.327989] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e536a28d-15ef-4f15-bd8e-2f970441dab8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.347076] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696308, 'name': ReconfigVM_Task, 'duration_secs': 0.669497} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.350615] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Reconfigured VM instance instance-00000062 to attach disk [datastore1] a495b540-806d-4cd8-b340-86fe937867cd/a495b540-806d-4cd8-b340-86fe937867cd.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1794.353193] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22996354-7476-481e-9256-be8d6e1404d1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.355741] env[62820]: DEBUG nova.compute.manager [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Received event network-vif-plugged-9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1794.356032] env[62820]: DEBUG oslo_concurrency.lockutils [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] Acquiring lock "f78bf828-b9ab-480e-bd58-3dd8587780ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.356242] env[62820]: DEBUG oslo_concurrency.lockutils [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.356417] env[62820]: DEBUG oslo_concurrency.lockutils [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.356596] env[62820]: DEBUG nova.compute.manager [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] No waiting events found dispatching network-vif-plugged-9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1794.356767] env[62820]: WARNING nova.compute.manager [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Received unexpected event network-vif-plugged-9b1b9e77-da64-4b99-b993-0175cd83f6b7 for instance with vm_state building and task_state spawning. 
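The ReconfigVM_Task, Rename_Task and PowerOnVM_Task records surrounding this point all follow the same oslo.vmware pattern: a vSphere task is started, wait_for_task blocks on it, and _poll_task emits "progress is N%" records until a final "completed successfully" record with duration_secs. The snippet below is a minimal sketch of that polling loop, not the actual oslo_vmware.api implementation; the get_task_info callable, the TaskInfo attributes and the 0.5s poll interval are illustrative assumptions.

    import time

    class TaskFailedError(Exception):
        """Raised when a vSphere task finishes in the error state."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # get_task_info stands in for the PropertyCollector read that
        # oslo.vmware performs; it is assumed to return an object with
        # .state ('running' | 'success' | 'error'), .progress and .error.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                print("Task: %s completed successfully." % task_ref)
                return info
            if info.state == 'error':
                raise TaskFailedError(info.error)
            print("Task: %s progress is %s%%." % (task_ref, info.progress or 0))
            time.sleep(poll_interval)

In the log this loop surfaces as the "Waiting for the task: (returnval){ ... } to complete." block followed by the periodic progress records from _poll_task.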
[ 1794.356933] env[62820]: DEBUG nova.compute.manager [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Received event network-changed-9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1794.357618] env[62820]: DEBUG nova.compute.manager [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Refreshing instance network info cache due to event network-changed-9b1b9e77-da64-4b99-b993-0175cd83f6b7. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1794.357618] env[62820]: DEBUG oslo_concurrency.lockutils [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] Acquiring lock "refresh_cache-f78bf828-b9ab-480e-bd58-3dd8587780ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.359375] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1794.359375] env[62820]: value = "task-1696310" [ 1794.359375] env[62820]: _type = "Task" [ 1794.359375] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.365663] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1794.365663] env[62820]: value = "task-1696311" [ 1794.365663] env[62820]: _type = "Task" [ 1794.365663] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.373920] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696310, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.380410] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696311, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.538108] env[62820]: DEBUG oslo_concurrency.lockutils [req-e665654b-f653-4948-961f-6f3f604929d9 req-80b9875e-be7c-463b-a428-13deacd29879 service nova] Releasing lock "refresh_cache-392d8bca-2d8d-42c3-ba14-fc1387c75405" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.546321] env[62820]: DEBUG nova.network.neutron [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.596613] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.602996] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526eb2a2-88ae-3cb0-fd78-7df6b144b82d, 'name': SearchDatastore_Task, 'duration_secs': 0.022932} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.603733] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.604222] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1794.605216] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d95f3f50-db53-489c-95fc-3050dd9ef20c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.615934] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1794.615934] env[62820]: value = "task-1696312" [ 1794.615934] env[62820]: _type = "Task" [ 1794.615934] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.625638] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696312, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.644019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-f78bf828-b9ab-480e-bd58-3dd8587780ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.644019] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Instance network_info: |[{"id": "9b1b9e77-da64-4b99-b993-0175cd83f6b7", "address": "fa:16:3e:c7:14:2b", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1b9e77-da", "ovs_interfaceid": "9b1b9e77-da64-4b99-b993-0175cd83f6b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1794.645023] env[62820]: DEBUG oslo_concurrency.lockutils [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] Acquired lock "refresh_cache-f78bf828-b9ab-480e-bd58-3dd8587780ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.645023] env[62820]: DEBUG nova.network.neutron [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Refreshing network info cache for port 9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1794.647047] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:14:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b1b9e77-da64-4b99-b993-0175cd83f6b7', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1794.656126] env[62820]: DEBUG oslo.service.loopingcall [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1794.660390] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1794.665609] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-754831c9-93f6-418f-be2e-18d3dd7a3d39 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.683803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.698217] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1794.698217] env[62820]: value = "task-1696313" [ 1794.698217] env[62820]: _type = "Task" [ 1794.698217] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.713519] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696313, 'name': CreateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.717288] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.755940] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.756575] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.756575] env[62820]: DEBUG nova.network.neutron [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.871696] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696310, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.881710] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696311, 'name': Rename_Task, 'duration_secs': 0.323808} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.886057] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1794.886851] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66b51690-cd30-4c75-88e7-e461c6859277 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.898507] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1794.898507] env[62820]: value = "task-1696314" [ 1794.898507] env[62820]: _type = "Task" [ 1794.898507] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.904519] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73be7e14-faac-48f6-9f9b-acd0c09a8e44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.917663] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f116db-7d77-432b-a0ed-d339a5e5c7e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.921739] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696314, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.953116] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123377a1-609c-4370-a974-64c2b975b11e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.965944] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f85d1d4-fd0f-4d5c-b903-dc9de148f1da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.983879] env[62820]: DEBUG nova.compute.provider_tree [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1795.049751] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.049751] env[62820]: DEBUG nova.objects.instance [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'flavor' on Instance uuid 3228cd34-2144-425a-aca6-400cb0991e43 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.130989] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696312, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.214310] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696313, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.371250] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696310, 'name': ReconfigVM_Task, 'duration_secs': 0.702234} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.371531] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da/6768101f-8d1d-46be-b0b9-2fdf6cba08da.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1795.372309] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2a1212f-d62d-4922-8386-363e77c0741e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.382605] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1795.382605] env[62820]: value = "task-1696315" [ 1795.382605] env[62820]: _type = "Task" [ 1795.382605] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.393748] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696315, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.409696] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696314, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.487876] env[62820]: DEBUG nova.scheduler.client.report [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1795.495170] env[62820]: INFO nova.network.neutron [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Port 5120b8e3-6688-4386-9c99-ee01add07316 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
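The inventory reported just above for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a fixes what the scheduler may place on this node: for each resource class, Placement treats (total - reserved) * allocation_ratio as the schedulable amount, with max_unit capping any single allocation. A short sketch of that arithmetic, using the exact figures from the record (the print format is illustrative):

    # Figures copied from the resource tracker record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 180},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: schedulable %.0f, single allocation capped at %d'
              % (rc, capacity, inv['max_unit']))
    # VCPU: 192 (48 cores * 4.0 overcommit), MEMORY_MB: 196078, DISK_GB: 400

That is why 48 physical cores can back far more than 48 instance vCPUs here, while memory and disk are not overcommitted at all.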
[ 1795.495170] env[62820]: DEBUG nova.network.neutron [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.555019] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c8505c-f2a8-4838-b5ed-5157a004b916 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.581614] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.581846] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33baf437-b1c7-48e3-a9fc-5375f0723a95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.584304] env[62820]: DEBUG nova.network.neutron [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Updated VIF entry in instance network info cache for port 9b1b9e77-da64-4b99-b993-0175cd83f6b7. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.584641] env[62820]: DEBUG nova.network.neutron [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Updating instance_info_cache with network_info: [{"id": "9b1b9e77-da64-4b99-b993-0175cd83f6b7", "address": "fa:16:3e:c7:14:2b", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1b9e77-da", "ovs_interfaceid": "9b1b9e77-da64-4b99-b993-0175cd83f6b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.593474] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1795.593474] env[62820]: value = "task-1696316" [ 1795.593474] env[62820]: _type = "Task" [ 1795.593474] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.605602] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.627489] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.609744} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.627874] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1795.628117] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1795.628379] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed184439-bad3-453e-a889-c7a3c26b8af7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.636559] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1795.636559] env[62820]: value = "task-1696317" [ 1795.636559] env[62820]: _type = "Task" [ 1795.636559] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.646047] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696317, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.713229] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696313, 'name': CreateVM_Task, 'duration_secs': 0.727214} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.713428] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1795.714160] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.714332] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.714646] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1795.714923] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab62d371-008b-43d1-8020-a161c8f7deac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.720782] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1795.720782] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c6bb97-13e9-d93e-a7aa-89c93dd0b76f" [ 1795.720782] env[62820]: _type = "Task" [ 1795.720782] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.731567] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c6bb97-13e9-d93e-a7aa-89c93dd0b76f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.750722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-4ea2be66-06b4-4519-82b0-c2b1df329a5a-5120b8e3-6688-4386-9c99-ee01add07316" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.751058] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-4ea2be66-06b4-4519-82b0-c2b1df329a5a-5120b8e3-6688-4386-9c99-ee01add07316" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.751565] env[62820]: DEBUG nova.objects.instance [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'flavor' on Instance uuid 4ea2be66-06b4-4519-82b0-c2b1df329a5a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.893567] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696315, 'name': Rename_Task, 'duration_secs': 0.359781} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.893892] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1795.894159] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c8fb66b-8034-4b22-9796-e03fb733cce6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.904054] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1795.904054] env[62820]: value = "task-1696318" [ 1795.904054] env[62820]: _type = "Task" [ 1795.904054] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.911168] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696314, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.916207] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.997959] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.087465] env[62820]: DEBUG oslo_concurrency.lockutils [req-6a87af1e-29f5-4377-8b8a-c1c9e3aa56a7 req-05a21229-d268-4b2d-a16d-21e7950caf19 service nova] Releasing lock "refresh_cache-f78bf828-b9ab-480e-bd58-3dd8587780ea" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.104286] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696316, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.147284] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696317, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07377} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.147551] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1796.148348] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb29e94-4b27-4365-8206-3f9c988c4ea7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.171028] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.171357] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44f2af20-8814-461b-93cd-3dc8f8d81225 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.192720] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1796.192720] env[62820]: value = "task-1696319" [ 1796.192720] env[62820]: _type = "Task" [ 1796.192720] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.204384] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.232639] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c6bb97-13e9-d93e-a7aa-89c93dd0b76f, 'name': SearchDatastore_Task, 'duration_secs': 0.019626} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.233122] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.233424] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1796.233776] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.233974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.234265] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1796.234574] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-187ee163-f580-4de8-9556-caa35f04ee10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.244692] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1796.244888] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1796.245808] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-203c8912-31bd-4210-8852-65b5d1c59643 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.252739] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1796.252739] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5225f4a1-9ec6-b484-e9be-6752537808d9" [ 1796.252739] env[62820]: _type = "Task" [ 1796.252739] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.265889] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5225f4a1-9ec6-b484-e9be-6752537808d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.373063] env[62820]: DEBUG nova.objects.instance [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'pci_requests' on Instance uuid 4ea2be66-06b4-4519-82b0-c2b1df329a5a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1796.387075] env[62820]: DEBUG nova.compute.manager [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1796.387249] env[62820]: DEBUG nova.compute.manager [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing instance network info cache due to event network-changed-37f8bb7e-538f-426a-a4e3-1ae811cad8d3. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1796.387464] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] Acquiring lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.387606] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] Acquired lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.387764] env[62820]: DEBUG nova.network.neutron [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Refreshing network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1796.413911] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696314, 'name': PowerOnVM_Task, 'duration_secs': 1.340385} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.414214] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1796.414412] env[62820]: DEBUG nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1796.415609] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2706281a-8a50-4fee-a01c-9f0a4d33b321 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.421943] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696318, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.503668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7ff68a0e-2500-4723-980c-24b1e7c9fb60 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-9c0d9676-9db9-4be2-a8e6-84bd816234aa-5120b8e3-6688-4386-9c99-ee01add07316" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.129s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.506168] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.495s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.510378] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.917s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.511912] env[62820]: INFO nova.compute.claims [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.604710] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696316, 'name': PowerOffVM_Task, 'duration_secs': 0.7504} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.605060] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1796.610234] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfiguring VM instance instance-00000047 to detach disk 2002 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1796.610503] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0366f9ea-2c4d-4da4-bc7d-b60ceab354ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.629445] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1796.629445] env[62820]: value = "task-1696320" [ 1796.629445] env[62820]: _type = "Task" [ 1796.629445] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.637583] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696320, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.702343] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696319, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.764641] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5225f4a1-9ec6-b484-e9be-6752537808d9, 'name': SearchDatastore_Task, 'duration_secs': 0.018954} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.765532] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf0420d0-d3cb-4012-90a3-d372574e1a71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.772392] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1796.772392] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520f7031-9fd8-46a1-2464-a9c1e9e24167" [ 1796.772392] env[62820]: _type = "Task" [ 1796.772392] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.781038] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520f7031-9fd8-46a1-2464-a9c1e9e24167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.875381] env[62820]: DEBUG nova.objects.base [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Object Instance<4ea2be66-06b4-4519-82b0-c2b1df329a5a> lazy-loaded attributes: flavor,pci_requests {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1796.875624] env[62820]: DEBUG nova.network.neutron [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1796.918569] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696318, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.930477] env[62820]: INFO nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] bringing vm to original state: 'stopped' [ 1796.990201] env[62820]: DEBUG nova.policy [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0ce10fc402147aaa582cf9d9337a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d4dc6b875b5420d87321f79b04bde9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1797.094354] env[62820]: INFO nova.scheduler.client.report [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocation for migration b8b6ec5c-db87-41a5-a6af-b30a71a3bb9f [ 1797.114122] env[62820]: DEBUG nova.network.neutron [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updated VIF entry in instance network info cache for port 37f8bb7e-538f-426a-a4e3-1ae811cad8d3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1797.114554] env[62820]: DEBUG nova.network.neutron [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [{"id": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "address": "fa:16:3e:ec:6d:0d", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37f8bb7e-53", "ovs_interfaceid": "37f8bb7e-538f-426a-a4e3-1ae811cad8d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.139938] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696320, 'name': ReconfigVM_Task, 'duration_secs': 0.422342} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.140227] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfigured VM instance instance-00000047 to detach disk 2002 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1797.140415] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1797.140667] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e818a39a-c8ba-40b6-b3c9-e72c714b8578 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.147461] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1797.147461] env[62820]: value = "task-1696321" [ 1797.147461] env[62820]: _type = "Task" [ 1797.147461] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.163621] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696321, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.203078] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696319, 'name': ReconfigVM_Task, 'duration_secs': 0.879048} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.203447] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to attach disk [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1797.204458] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-904600ff-d9b9-4b32-90bf-c554b7182b5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.211963] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1797.211963] env[62820]: value = "task-1696322" [ 1797.211963] env[62820]: _type = "Task" [ 1797.211963] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.221627] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696322, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.283512] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520f7031-9fd8-46a1-2464-a9c1e9e24167, 'name': SearchDatastore_Task, 'duration_secs': 0.021578} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.283795] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.284061] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] f78bf828-b9ab-480e-bd58-3dd8587780ea/f78bf828-b9ab-480e-bd58-3dd8587780ea.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1797.284387] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-717dcb12-9282-4d2f-9bc8-829c07c9e25e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.292482] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1797.292482] env[62820]: value = "task-1696323" [ 1797.292482] env[62820]: _type = "Task" [ 1797.292482] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.301703] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.418323] env[62820]: DEBUG oslo_vmware.api [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696318, 'name': PowerOnVM_Task, 'duration_secs': 1.453136} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.418617] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.418858] env[62820]: DEBUG nova.compute.manager [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.419763] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccf8c30-5405-4c6a-86df-a687fbce5810 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.600977] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ec4a7e8d-ae98-4db6-aac7-573ad5d56a62 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.229s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.617395] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] Releasing lock "refresh_cache-9c0d9676-9db9-4be2-a8e6-84bd816234aa" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.617648] env[62820]: DEBUG nova.compute.manager [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1797.617836] env[62820]: DEBUG nova.compute.manager [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing instance network info cache due to event network-changed-ced8fea2-d4eb-4f3b-b2be-7974608dd130. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1797.618607] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.618810] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.619010] env[62820]: DEBUG nova.network.neutron [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1797.657590] env[62820]: DEBUG oslo_vmware.api [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696321, 'name': PowerOnVM_Task, 'duration_secs': 0.438927} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.659981] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.660232] env[62820]: DEBUG nova.compute.manager [None req-fd96ea68-af60-40b6-b3a3-6a91fc867fef tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.661166] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491ce599-c556-4a05-ab16-998725f3d03e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.723593] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696322, 'name': Rename_Task, 'duration_secs': 0.182416} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.723593] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1797.723593] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-153b0cd7-cf3c-48b3-bfb4-c4d8d8ad262f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.731638] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1797.731638] env[62820]: value = "task-1696324" [ 1797.731638] env[62820]: _type = "Task" [ 1797.731638] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.744018] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.794587] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d5a78c-e309-442b-80d1-71572d7e7817 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.808847] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536ad23e-f753-4cb2-b706-abe4aa96560a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.812410] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.844806] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca434609-dd62-424b-91ca-1a1515c2c4ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.854526] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b5c792-fcb4-4f8b-9567-4a45f82108fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.870478] env[62820]: DEBUG nova.compute.provider_tree [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.938324] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "a495b540-806d-4cd8-b340-86fe937867cd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.938602] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.938912] env[62820]: DEBUG nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.942645] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0988fc-10a1-48f6-8cd0-0272cb730032 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.946619] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.952297] env[62820]: DEBUG nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1798.243579] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d 
tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696324, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.307522] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.373833] env[62820]: DEBUG nova.scheduler.client.report [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1798.402133] env[62820]: DEBUG nova.network.neutron [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updated VIF entry in instance network info cache for port ced8fea2-d4eb-4f3b-b2be-7974608dd130. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.402649] env[62820]: DEBUG nova.network.neutron [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.459716] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1798.460269] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "35b95400-6399-48ae-b7d5-420c33d653dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.460748] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.460915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.461301] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.461608] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.463352] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e88a6239-7c59-473c-b8d6-055198a13e6e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.466314] env[62820]: INFO nova.compute.manager [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Terminating instance [ 1798.485338] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1798.485338] env[62820]: value = "task-1696325" [ 1798.485338] env[62820]: _type = "Task" [ 1798.485338] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.499939] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696325, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.745746] env[62820]: DEBUG oslo_vmware.api [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696324, 'name': PowerOnVM_Task, 'duration_secs': 0.764636} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.746166] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.746319] env[62820]: INFO nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Took 9.51 seconds to spawn the instance on the hypervisor. [ 1798.746587] env[62820]: DEBUG nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1798.747494] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3d885b-23b7-4968-8951-c00c448b2d68 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.759980] env[62820]: DEBUG nova.network.neutron [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Successfully updated port: 5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1798.807497] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.879757] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.880373] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1798.884044] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.200s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.885594] env[62820]: INFO nova.compute.claims [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1798.910031] env[62820]: DEBUG oslo_concurrency.lockutils [req-2a35c501-ea3b-4646-85a2-c9fd104fbb8c req-319de101-a6eb-4634-850c-5c266723963f service nova] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.970786] env[62820]: DEBUG nova.compute.manager [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1798.970786] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1798.971590] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0def240c-a535-46d4-9988-ded6b2c820be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.982730] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1798.983096] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30f094fd-c588-4d7c-85de-1b66989f5b70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.994468] env[62820]: DEBUG oslo_vmware.api [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1798.994468] env[62820]: value = "task-1696326" [ 1798.994468] env[62820]: _type = "Task" [ 1798.994468] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.005127] env[62820]: DEBUG oslo_vmware.api [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696325, 'name': PowerOffVM_Task, 'duration_secs': 0.401049} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.006042] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.006428] env[62820]: DEBUG nova.compute.manager [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1799.009070] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece9891a-55dc-4215-992c-6c0ecea762ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.021951] env[62820]: DEBUG oslo_vmware.api [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696326, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.096497] env[62820]: DEBUG nova.compute.manager [req-ef0b01c4-cab6-4db2-9c8a-67707e5080fc req-557a310f-f9d3-479c-8612-326f4ae6327f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-vif-plugged-5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1799.097107] env[62820]: DEBUG oslo_concurrency.lockutils [req-ef0b01c4-cab6-4db2-9c8a-67707e5080fc req-557a310f-f9d3-479c-8612-326f4ae6327f service nova] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.097107] env[62820]: DEBUG oslo_concurrency.lockutils [req-ef0b01c4-cab6-4db2-9c8a-67707e5080fc req-557a310f-f9d3-479c-8612-326f4ae6327f service nova] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.097252] env[62820]: DEBUG oslo_concurrency.lockutils [req-ef0b01c4-cab6-4db2-9c8a-67707e5080fc req-557a310f-f9d3-479c-8612-326f4ae6327f service nova] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.097318] env[62820]: DEBUG nova.compute.manager [req-ef0b01c4-cab6-4db2-9c8a-67707e5080fc req-557a310f-f9d3-479c-8612-326f4ae6327f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] No waiting events found dispatching network-vif-plugged-5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1799.097489] env[62820]: WARNING nova.compute.manager [req-ef0b01c4-cab6-4db2-9c8a-67707e5080fc req-557a310f-f9d3-479c-8612-326f4ae6327f service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received unexpected event network-vif-plugged-5120b8e3-6688-4386-9c99-ee01add07316 for instance with vm_state active and task_state None. 
[ 1799.265247] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.265456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.265640] env[62820]: DEBUG nova.network.neutron [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1799.277596] env[62820]: INFO nova.compute.manager [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Took 15.27 seconds to build instance. [ 1799.314822] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.389681] env[62820]: DEBUG nova.compute.utils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.389681] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1799.389681] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.460496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.460722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.505787] env[62820]: DEBUG oslo_vmware.api [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696326, 'name': PowerOffVM_Task, 'duration_secs': 0.287875} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.507231] env[62820]: DEBUG nova.policy [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ed862932b694982a765bc16b4721451', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bbc21e8b7764b9ebf63ec470ef4e1e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1799.508681] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.508906] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1799.509187] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-962a06fa-28e5-4a50-bf60-5bb735b9ffe8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1799.524021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.524021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.524021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.524021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.524021] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.526214] env[62820]: INFO nova.compute.manager [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Terminating instance [ 1799.533151] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.594s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.711474] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1799.711763] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 
tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1799.711966] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] 35b95400-6399-48ae-b7d5-420c33d653dd {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1799.712284] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5743f759-992e-4542-a8c6-a49fcb3443ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.721183] env[62820]: DEBUG oslo_vmware.api [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1799.721183] env[62820]: value = "task-1696328" [ 1799.721183] env[62820]: _type = "Task" [ 1799.721183] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.732931] env[62820]: DEBUG oslo_vmware.api [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.782069] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b130ee14-1a29-485f-a619-330d2a644e0d tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.796s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.809295] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.822024] env[62820]: WARNING nova.network.neutron [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] 26851e2e-dece-4dce-bec8-e64227003b80 already exists in list: networks containing: ['26851e2e-dece-4dce-bec8-e64227003b80']. ignoring it [ 1799.902666] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1799.969715] env[62820]: DEBUG nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1800.033259] env[62820]: DEBUG nova.compute.manager [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1800.033679] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.036076] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e463abe-fd79-4c5a-9be2-561462ac5552 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.045800] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.048523] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.051530] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e2540f7-155c-4e37-bf1e-62a1a17f62fe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.062662] env[62820]: DEBUG oslo_vmware.api [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1800.062662] env[62820]: value = "task-1696329" [ 1800.062662] env[62820]: _type = "Task" [ 1800.062662] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.081234] env[62820]: DEBUG nova.compute.manager [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Received event network-changed-f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1800.081541] env[62820]: DEBUG nova.compute.manager [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Refreshing instance network info cache due to event network-changed-f7027439-2429-4746-8bc9-a95ce975c96a. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1800.081847] env[62820]: DEBUG oslo_concurrency.lockutils [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.082066] env[62820]: DEBUG oslo_concurrency.lockutils [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.082234] env[62820]: DEBUG nova.network.neutron [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Refreshing network info cache for port f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.088396] env[62820]: DEBUG oslo_vmware.api [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696329, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.173443] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Successfully created port: 8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1800.239861] env[62820]: DEBUG oslo_vmware.api [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.514449} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.240320] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1800.240590] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1800.240899] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1800.241366] env[62820]: INFO nova.compute.manager [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1800.241518] env[62820]: DEBUG oslo.service.loopingcall [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.241751] env[62820]: DEBUG nova.compute.manager [-] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1800.241895] env[62820]: DEBUG nova.network.neutron [-] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1800.316879] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696323, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.81897} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.320480] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] f78bf828-b9ab-480e-bd58-3dd8587780ea/f78bf828-b9ab-480e-bd58-3dd8587780ea.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1800.320480] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1800.325350] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc04d38b-dd94-4b60-af5e-069f517916b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.342190] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1800.342190] env[62820]: value = "task-1696330" [ 1800.342190] env[62820]: _type = "Task" [ 1800.342190] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.342662] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0445a53f-1e1e-4fe9-81ea-3ac7b268323d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.361854] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736fb0d5-093f-4ce7-b8ba-4a506c380877 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.365670] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696330, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.399214] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f3f14f-8eb9-4f93-972c-baed4220d68d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.419217] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44a32bb-346d-4e40-a0f6-c80c83cac3a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.434250] env[62820]: DEBUG nova.compute.provider_tree [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.456924] env[62820]: DEBUG nova.network.neutron [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5120b8e3-6688-4386-9c99-ee01add07316", "address": "fa:16:3e:02:c5:f6", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap5120b8e3-66", "ovs_interfaceid": "5120b8e3-6688-4386-9c99-ee01add07316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.498544] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.583301] env[62820]: DEBUG oslo_vmware.api [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696329, 'name': PowerOffVM_Task, 'duration_secs': 0.242031} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.583669] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1800.583927] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1800.584314] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a47fd1e6-fea2-4080-a533-c5fc5559ebb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.685417] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1800.685649] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1800.685832] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleting the datastore file [datastore1] 6768101f-8d1d-46be-b0b9-2fdf6cba08da {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.686134] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64f7fb1f-67ea-4750-8dcc-92c86c0d58ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.694428] env[62820]: DEBUG oslo_vmware.api [None 
req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1800.694428] env[62820]: value = "task-1696332" [ 1800.694428] env[62820]: _type = "Task" [ 1800.694428] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.706811] env[62820]: DEBUG oslo_vmware.api [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.814255] env[62820]: DEBUG nova.network.neutron [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Updated VIF entry in instance network info cache for port f7027439-2429-4746-8bc9-a95ce975c96a. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1800.814641] env[62820]: DEBUG nova.network.neutron [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.852182] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214103} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.852449] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1800.853334] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea89d67f-7663-456e-8cc7-7db79808eb27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.875898] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] f78bf828-b9ab-480e-bd58-3dd8587780ea/f78bf828-b9ab-480e-bd58-3dd8587780ea.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1800.876220] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b5d95e7-086e-49ea-94ac-a1a821617edf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.897847] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1800.897847] env[62820]: value = "task-1696333" [ 1800.897847] env[62820]: _type = "Task" [ 1800.897847] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.906487] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696333, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.909979] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "a495b540-806d-4cd8-b340-86fe937867cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.910222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.910420] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "a495b540-806d-4cd8-b340-86fe937867cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.910604] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.910771] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.912880] env[62820]: INFO nova.compute.manager [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Terminating instance [ 1800.922547] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1800.938726] env[62820]: DEBUG nova.scheduler.client.report [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1800.951171] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1800.951500] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1800.951607] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1800.952186] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1800.952186] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1800.952186] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1800.952340] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1800.952497] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1800.952688] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1800.952857] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1800.953034] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1800.953945] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46ed98c-724b-4088-9761-94a97fffe8d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.959081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.959679] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.959905] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.962608] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ca979a-1d51-4fdf-a852-484eb14ec446 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.966516] 
env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf40765c-5e88-4638-8b97-5445a0ef8789 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.984823] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1800.985131] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1800.985246] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1800.985436] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1800.985601] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1800.985735] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1800.985937] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1800.986117] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1800.986290] env[62820]: DEBUG nova.virt.hardware [None 
req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1800.986507] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1800.986643] env[62820]: DEBUG nova.virt.hardware [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1800.992861] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Reconfiguring VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1801.000972] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd1c6e6b-4ed0-4e93-a6ba-d4ca470b004d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.026112] env[62820]: DEBUG oslo_vmware.api [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1801.026112] env[62820]: value = "task-1696334" [ 1801.026112] env[62820]: _type = "Task" [ 1801.026112] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.035619] env[62820]: DEBUG oslo_vmware.api [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696334, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.063699] env[62820]: DEBUG nova.network.neutron [-] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.205581] env[62820]: DEBUG oslo_vmware.api [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466388} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.206409] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.206409] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1801.206600] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1801.206801] env[62820]: INFO nova.compute.manager [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1801.207085] env[62820]: DEBUG oslo.service.loopingcall [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.207292] env[62820]: DEBUG nova.compute.manager [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1801.207388] env[62820]: DEBUG nova.network.neutron [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1801.214707] env[62820]: DEBUG nova.compute.manager [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-changed-5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1801.214832] env[62820]: DEBUG nova.compute.manager [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing instance network info cache due to event network-changed-5120b8e3-6688-4386-9c99-ee01add07316. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1801.215048] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.215209] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.215351] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Refreshing network info cache for port 5120b8e3-6688-4386-9c99-ee01add07316 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1801.317891] env[62820]: DEBUG oslo_concurrency.lockutils [req-2820a1f5-d6ff-43e2-aef2-a8acb99398e2 req-5097c713-e7f1-4e8f-a16f-347d0d1360e4 service nova] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.412288] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696333, 'name': ReconfigVM_Task, 'duration_secs': 0.466447} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.412700] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Reconfigured VM instance instance-00000067 to attach disk [datastore1] f78bf828-b9ab-480e-bd58-3dd8587780ea/f78bf828-b9ab-480e-bd58-3dd8587780ea.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1801.413521] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb1428d1-6a7d-4bed-9477-43df6e0d3394 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.416136] env[62820]: DEBUG nova.compute.manager [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1801.416361] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1801.417121] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ec85d2-0793-40c1-850c-276e7121742e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.425821] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.427071] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-847ba079-a079-4a08-a330-4096ef9731b7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.428679] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1801.428679] env[62820]: value = "task-1696335" [ 1801.428679] env[62820]: _type = "Task" [ 1801.428679] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.438837] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696335, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.445873] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.446471] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1801.449740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.733s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.451339] env[62820]: INFO nova.compute.claims [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1801.537384] env[62820]: DEBUG oslo_vmware.api [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.566549] env[62820]: INFO nova.compute.manager [-] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Took 1.32 seconds to deallocate network for instance. [ 1801.583372] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.583594] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.583754] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] a495b540-806d-4cd8-b340-86fe937867cd {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.584070] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4f68642-7e3a-4fb5-a90d-3f751b7eadf8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.592131] env[62820]: DEBUG oslo_vmware.api [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1801.592131] env[62820]: value = "task-1696337" [ 1801.592131] env[62820]: _type = "Task" [ 1801.592131] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.601292] env[62820]: DEBUG oslo_vmware.api [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.937791] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Successfully updated port: 8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1801.946071] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696335, 'name': Rename_Task, 'duration_secs': 0.227364} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.946802] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1801.947345] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc88e673-9988-45ad-aa32-56bcac687dcb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.950875] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updated VIF entry in instance network info cache for port 5120b8e3-6688-4386-9c99-ee01add07316. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.951406] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5120b8e3-6688-4386-9c99-ee01add07316", "address": "fa:16:3e:02:c5:f6", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5120b8e3-66", "ovs_interfaceid": "5120b8e3-6688-4386-9c99-ee01add07316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.956098] env[62820]: DEBUG nova.compute.utils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.959424] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1801.959424] env[62820]: value = "task-1696338" [ 1801.959424] env[62820]: _type = "Task" [ 1801.959424] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.960720] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1801.960720] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1801.965505] env[62820]: DEBUG nova.network.neutron [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.974036] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.010818] env[62820]: DEBUG nova.policy [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ed862932b694982a765bc16b4721451', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bbc21e8b7764b9ebf63ec470ef4e1e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1802.037254] env[62820]: DEBUG oslo_vmware.api [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696334, 'name': ReconfigVM_Task, 'duration_secs': 0.810991} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.037760] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.037982] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Reconfigured VM to attach interface {{(pid=62820) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1802.074306] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.102077] env[62820]: DEBUG oslo_vmware.api [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.495376} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.106017] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1802.106017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1802.106017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1802.106017] env[62820]: INFO nova.compute.manager [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Took 0.69 seconds to destroy the instance on the hypervisor. [ 1802.106017] env[62820]: DEBUG oslo.service.loopingcall [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.106017] env[62820]: DEBUG nova.compute.manager [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1802.106017] env[62820]: DEBUG nova.network.neutron [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1802.118634] env[62820]: DEBUG nova.compute.manager [req-e877b543-6c66-41d0-8f02-fdbcba18a922 req-5cce5e6b-2ac7-4b5a-9842-ce97ea4c8c53 service nova] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Received event network-vif-deleted-778fee60-5af2-4328-a536-56882267761d {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1802.118961] env[62820]: DEBUG nova.compute.manager [req-e877b543-6c66-41d0-8f02-fdbcba18a922 req-5cce5e6b-2ac7-4b5a-9842-ce97ea4c8c53 service nova] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Received event network-vif-deleted-94134340-ccc9-4f22-af2f-2b68424f6ec3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1802.263652] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Successfully created port: 609eb18a-aed2-4b6d-bb13-b94371396c84 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1802.444823] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "refresh_cache-8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.444823] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "refresh_cache-8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.444823] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1802.455452] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.455854] env[62820]: DEBUG nova.compute.manager [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1802.456249] env[62820]: DEBUG 
nova.compute.manager [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing instance network info cache due to event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1802.456573] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.456847] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.457134] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1802.463536] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1802.468243] env[62820]: INFO nova.compute.manager [-] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Took 1.26 seconds to deallocate network for instance. [ 1802.490321] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696338, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.542107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dbe05ec9-5584-4a2b-abef-291b46934c03 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-4ea2be66-06b4-4519-82b0-c2b1df329a5a-5120b8e3-6688-4386-9c99-ee01add07316" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.791s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.730750] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad67c9b-4481-4b4c-b507-43792e910178 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.738579] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36038cc-89d9-4210-8a6a-d5bd7b481d42 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.771064] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e409ab-139a-4016-b4c3-ff1d660b965c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.780352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a412a50-fec0-4248-99de-7bac237d8f0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.793969] env[62820]: DEBUG nova.compute.provider_tree [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.900460] env[62820]: DEBUG nova.network.neutron [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.978481] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1802.984077] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696338, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.984987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.115892] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Updating instance_info_cache with network_info: [{"id": "8dbeff3a-dcda-4f84-b5b3-b12c75219348", "address": "fa:16:3e:ed:77:3e", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dbeff3a-dc", "ovs_interfaceid": "8dbeff3a-dcda-4f84-b5b3-b12c75219348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.188166] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updated VIF entry in instance network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1803.188547] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.243759] env[62820]: DEBUG nova.compute.manager [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Received event network-vif-plugged-8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1803.243985] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] Acquiring lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.244211] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.244383] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.244551] env[62820]: DEBUG nova.compute.manager [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] No waiting events found dispatching 
network-vif-plugged-8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1803.244717] env[62820]: WARNING nova.compute.manager [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Received unexpected event network-vif-plugged-8dbeff3a-dcda-4f84-b5b3-b12c75219348 for instance with vm_state building and task_state spawning. [ 1803.244878] env[62820]: DEBUG nova.compute.manager [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Received event network-changed-8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1803.245044] env[62820]: DEBUG nova.compute.manager [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Refreshing instance network info cache due to event network-changed-8dbeff3a-dcda-4f84-b5b3-b12c75219348. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1803.245212] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] Acquiring lock "refresh_cache-8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.297059] env[62820]: DEBUG nova.scheduler.client.report [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1803.352391] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1803.352611] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1803.352761] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1803.403225] env[62820]: INFO nova.compute.manager [-] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Took 1.30 seconds to deallocate network for instance. 
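The ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task, PowerOnVM_Task and CreateVM_Task entries above all follow the same invoke-then-poll pattern: a *_Task SOAP method returns a task reference immediately, and oslo.vmware's wait_for_task() polls it, producing the "Waiting for the task" and "progress is N%" lines until the task reports "completed successfully". The following is only a minimal sketch of that pattern against oslo.vmware's public session API; the vCenter host, credentials and the VM managed-object ID 'vm-123' are placeholders, not values taken from this log.

from oslo_vmware import api, vim_util

# Constructing the session connects and logs in to vCenter (create_session
# defaults to True); task_poll_interval controls how often a pending task is
# polled for progress.
session = api.VMwareAPISession(
    'vcenter.example.org',           # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder user
    'secret',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# Managed object reference for an existing VM; 'vm-123' is a placeholder ID.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# *_Task methods return a task reference at once; wait_for_task() then blocks,
# polling the task (the "progress is N%" debug lines) and raising if the task
# ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)

The nova.virt.vmwareapi code emitting this log drives the same calls indirectly through its vm_util and vmops helpers rather than invoking the session directly.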
[ 1803.479067] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1803.488364] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696338, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.507088] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1803.507404] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1803.507601] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1803.507821] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1803.508009] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1803.508211] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1803.508455] env[62820]: DEBUG nova.virt.hardware [None 
req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1803.508653] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1803.508913] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1803.509177] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1803.509410] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1803.510367] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ac0af6-2ee6-422b-97da-1dc80479d1ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.518618] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94055869-2037-4f6b-af7b-17760d7e80d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.618497] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "refresh_cache-8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.618921] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Instance network_info: |[{"id": "8dbeff3a-dcda-4f84-b5b3-b12c75219348", "address": "fa:16:3e:ed:77:3e", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dbeff3a-dc", "ovs_interfaceid": "8dbeff3a-dcda-4f84-b5b3-b12c75219348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1803.619314] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] Acquired lock "refresh_cache-8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.619538] env[62820]: DEBUG nova.network.neutron [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Refreshing network info cache for port 8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.620924] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:77:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8dbeff3a-dcda-4f84-b5b3-b12c75219348', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1803.628396] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Creating folder: Project (2bbc21e8b7764b9ebf63ec470ef4e1e0). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1803.629466] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2665bc84-deb9-420b-b85e-06517ec07641 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.643236] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Created folder: Project (2bbc21e8b7764b9ebf63ec470ef4e1e0) in parent group-v353379. [ 1803.643445] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Creating folder: Instances. Parent ref: group-v353665. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1803.643693] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8abcea9d-0be2-408d-b0b8-bff96f18731d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.654028] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Created folder: Instances in parent group-v353665. [ 1803.654283] env[62820]: DEBUG oslo.service.loopingcall [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1803.654476] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1803.654679] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa1a7cef-be91-4856-a9f0-df9e40970daa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.674404] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1803.674404] env[62820]: value = "task-1696341" [ 1803.674404] env[62820]: _type = "Task" [ 1803.674404] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.683053] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696341, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.691200] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.691505] env[62820]: DEBUG nova.compute.manager [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1803.691678] env[62820]: DEBUG nova.compute.manager [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing instance network info cache due to event network-changed-1c306539-7756-458b-84e7-61bfbc0c7f35. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1803.691890] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Acquiring lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.692048] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Acquired lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.692218] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Refreshing network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.801454] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.804020] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1803.804680] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.858s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.804858] env[62820]: DEBUG nova.objects.instance [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1803.900708] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.900952] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.901071] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1803.912220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.956099] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "interface-4ea2be66-06b4-4519-82b0-c2b1df329a5a-5120b8e3-6688-4386-9c99-ee01add07316" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.956383] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-4ea2be66-06b4-4519-82b0-c2b1df329a5a-5120b8e3-6688-4386-9c99-ee01add07316" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.980066] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Successfully updated port: 609eb18a-aed2-4b6d-bb13-b94371396c84 
{{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1803.987534] env[62820]: DEBUG oslo_vmware.api [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696338, 'name': PowerOnVM_Task, 'duration_secs': 1.913255} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.987792] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1803.987995] env[62820]: INFO nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Took 12.30 seconds to spawn the instance on the hypervisor. [ 1803.988191] env[62820]: DEBUG nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1803.988981] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59e04dc-12c6-4b3a-ae81-0b7b329d1b67 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.145808] env[62820]: DEBUG nova.compute.manager [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Received event network-vif-plugged-609eb18a-aed2-4b6d-bb13-b94371396c84 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1804.146043] env[62820]: DEBUG oslo_concurrency.lockutils [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] Acquiring lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.146263] env[62820]: DEBUG oslo_concurrency.lockutils [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.146433] env[62820]: DEBUG oslo_concurrency.lockutils [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.146618] env[62820]: DEBUG nova.compute.manager [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] No waiting events found dispatching 
network-vif-plugged-609eb18a-aed2-4b6d-bb13-b94371396c84 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1804.146818] env[62820]: WARNING nova.compute.manager [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Received unexpected event network-vif-plugged-609eb18a-aed2-4b6d-bb13-b94371396c84 for instance with vm_state building and task_state spawning. [ 1804.147104] env[62820]: DEBUG nova.compute.manager [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Received event network-changed-609eb18a-aed2-4b6d-bb13-b94371396c84 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1804.147543] env[62820]: DEBUG nova.compute.manager [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Refreshing instance network info cache due to event network-changed-609eb18a-aed2-4b6d-bb13-b94371396c84. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1804.147543] env[62820]: DEBUG oslo_concurrency.lockutils [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] Acquiring lock "refresh_cache-488900b2-d0c9-4437-9f0c-dfb2ea38cb71" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.147669] env[62820]: DEBUG oslo_concurrency.lockutils [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] Acquired lock "refresh_cache-488900b2-d0c9-4437-9f0c-dfb2ea38cb71" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.147766] env[62820]: DEBUG nova.network.neutron [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Refreshing network info cache for port 609eb18a-aed2-4b6d-bb13-b94371396c84 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1804.185545] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696341, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.308906] env[62820]: DEBUG nova.compute.utils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.310302] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1804.310476] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1804.365724] env[62820]: DEBUG nova.policy [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ed862932b694982a765bc16b4721451', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bbc21e8b7764b9ebf63ec470ef4e1e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1804.462587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.462587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.463726] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5751a0a-87d5-4fc6-ab95-9513133053c4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.467545] env[62820]: DEBUG nova.network.neutron [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Updated VIF entry in instance network info cache for port 8dbeff3a-dcda-4f84-b5b3-b12c75219348. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.468107] env[62820]: DEBUG nova.network.neutron [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Updating instance_info_cache with network_info: [{"id": "8dbeff3a-dcda-4f84-b5b3-b12c75219348", "address": "fa:16:3e:ed:77:3e", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dbeff3a-dc", "ovs_interfaceid": "8dbeff3a-dcda-4f84-b5b3-b12c75219348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.494264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "refresh_cache-488900b2-d0c9-4437-9f0c-dfb2ea38cb71" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.497097] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb32b75c-883b-41c6-86de-79083691052f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.512847] env[62820]: INFO nova.compute.manager [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Took 18.21 seconds to build instance. [ 1804.555462] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Reconfiguring VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1804.556146] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0b22339-8f9a-4b54-8bd4-4200de374b5c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.569842] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updated VIF entry in instance network info cache for port 1c306539-7756-458b-84e7-61bfbc0c7f35. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.570237] env[62820]: DEBUG nova.network.neutron [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [{"id": "1c306539-7756-458b-84e7-61bfbc0c7f35", "address": "fa:16:3e:68:a0:af", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c306539-77", "ovs_interfaceid": "1c306539-7756-458b-84e7-61bfbc0c7f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.580107] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1804.580107] env[62820]: value = "task-1696342" [ 1804.580107] env[62820]: _type = "Task" [ 1804.580107] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.590768] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.656858] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Successfully created port: 3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1804.679788] env[62820]: DEBUG nova.network.neutron [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1804.687402] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696341, 'name': CreateVM_Task, 'duration_secs': 0.556541} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.687530] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1804.688200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.688363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.688674] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1804.688956] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7882f40-b30a-44e4-b8e1-de9a97317fee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.693994] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1804.693994] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5279c76a-7da4-aebc-e274-5a7c3de63118" [ 1804.693994] env[62820]: _type = "Task" [ 1804.693994] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.702305] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5279c76a-7da4-aebc-e274-5a7c3de63118, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.759757] env[62820]: DEBUG nova.network.neutron [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.817729] env[62820]: DEBUG oslo_concurrency.lockutils [None req-cbbfff6e-1675-4955-a0ab-386366920e51 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.819062] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1804.822498] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.778s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.822801] env[62820]: DEBUG nova.objects.instance [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1804.975161] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] Releasing lock "refresh_cache-8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.975741] env[62820]: DEBUG nova.compute.manager [req-ea704ec1-54ae-4e43-bc5b-78c0a5c2acc5 req-c7dffd40-8eac-4643-afad-db0a807cb51a service nova] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Received event network-vif-deleted-18cc900d-6813-4f95-b166-a6b1a486f112 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1805.016728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2420e9e5-0ff1-4352-8121-055bf5145829 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.722s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.073119] env[62820]: DEBUG oslo_concurrency.lockutils [req-22112fc7-7ef9-4a4d-85ed-a4a09dfb1493 req-b1fdfbd7-7f84-4bf7-829a-a68cc2b57989 service nova] Releasing lock "refresh_cache-3228cd34-2144-425a-aca6-400cb0991e43" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.093114] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.205733] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5279c76a-7da4-aebc-e274-5a7c3de63118, 'name': SearchDatastore_Task, 'duration_secs': 0.012442} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.208107] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.208461] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1805.208700] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.208877] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.209065] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1805.209347] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7acd621-fd57-4662-af69-c10b822e19fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.219582] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1805.219769] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1805.220519] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cba9f5e-d134-4c1f-8704-ec5037a0ce5b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.226326] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1805.226326] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ab4a34-8540-2589-1e3e-9fdbb70fd0b1" [ 1805.226326] env[62820]: _type = "Task" [ 1805.226326] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.235515] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ab4a34-8540-2589-1e3e-9fdbb70fd0b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.249336] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updating instance_info_cache with network_info: [{"id": "8ba6813f-c30f-416d-b888-4a33a10698ef", "address": "fa:16:3e:0d:dc:a3", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6813f-c3", "ovs_interfaceid": "8ba6813f-c30f-416d-b888-4a33a10698ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.262783] env[62820]: DEBUG oslo_concurrency.lockutils [req-b478b182-0be0-4d5d-97d5-d57874575e08 req-c9e2a98b-a48c-4ffb-bde0-8b98ab9ba901 service nova] Releasing lock "refresh_cache-488900b2-d0c9-4437-9f0c-dfb2ea38cb71" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} 
[ 1805.263169] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "refresh_cache-488900b2-d0c9-4437-9f0c-dfb2ea38cb71" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.263325] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1805.592391] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.646161] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "f78bf828-b9ab-480e-bd58-3dd8587780ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.646515] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.646812] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "f78bf828-b9ab-480e-bd58-3dd8587780ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.647027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.647214] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.649625] env[62820]: INFO nova.compute.manager [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Terminating instance [ 1805.737865] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ab4a34-8540-2589-1e3e-9fdbb70fd0b1, 'name': SearchDatastore_Task, 'duration_secs': 0.013235} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.738644] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ede54924-c8a1-481c-b0de-1c2eccddf7dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.744503] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1805.744503] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5261c134-99f6-6cc4-367f-74650398b2de" [ 1805.744503] env[62820]: _type = "Task" [ 1805.744503] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.753365] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-eafe98b7-a67d-4bab-bfc0-8367ae069d31" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.753536] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1805.753755] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5261c134-99f6-6cc4-367f-74650398b2de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.753958] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.754135] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.754357] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.754550] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.754701] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.754865] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.754989] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1805.755135] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.798572] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1805.832626] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1805.835420] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2af3c7e2-16c5-43bc-a4ef-822050469d9c tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.837374] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.339s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.838866] env[62820]: INFO nova.compute.claims [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1805.864906] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.865495] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.865654] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.866142] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1805.866330] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Image pref 0:0:0 {{(pid=62820) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.866505] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1805.866723] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1805.866917] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1805.867105] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1805.867294] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1805.867472] env[62820]: DEBUG nova.virt.hardware [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1805.868919] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f517dedd-6961-452d-a545-6d7f41b77f56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.879080] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f827c23-dce6-4cae-803b-bf3eec47a279 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.952842] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Updating instance_info_cache with network_info: [{"id": "609eb18a-aed2-4b6d-bb13-b94371396c84", "address": "fa:16:3e:13:37:7d", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap609eb18a-ae", "ovs_interfaceid": "609eb18a-aed2-4b6d-bb13-b94371396c84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.093119] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.148718] env[62820]: DEBUG nova.compute.manager [req-4b91737b-682e-47a8-8bdc-2e5191ca21eb req-b1523496-02da-4187-b4dc-c1b33a8c0bfb service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Received event network-vif-plugged-3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1806.149072] env[62820]: DEBUG oslo_concurrency.lockutils [req-4b91737b-682e-47a8-8bdc-2e5191ca21eb req-b1523496-02da-4187-b4dc-c1b33a8c0bfb service nova] Acquiring lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.149305] env[62820]: DEBUG oslo_concurrency.lockutils [req-4b91737b-682e-47a8-8bdc-2e5191ca21eb req-b1523496-02da-4187-b4dc-c1b33a8c0bfb service nova] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.149480] env[62820]: DEBUG oslo_concurrency.lockutils [req-4b91737b-682e-47a8-8bdc-2e5191ca21eb req-b1523496-02da-4187-b4dc-c1b33a8c0bfb service nova] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.149655] env[62820]: DEBUG nova.compute.manager [req-4b91737b-682e-47a8-8bdc-2e5191ca21eb req-b1523496-02da-4187-b4dc-c1b33a8c0bfb service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] No waiting events found dispatching network-vif-plugged-3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1806.149843] env[62820]: WARNING nova.compute.manager [req-4b91737b-682e-47a8-8bdc-2e5191ca21eb req-b1523496-02da-4187-b4dc-c1b33a8c0bfb service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Received unexpected event network-vif-plugged-3b0e0049-3600-401b-b074-0a891b2829e0 for instance with vm_state building and task_state spawning. 
[ 1806.152925] env[62820]: DEBUG nova.compute.manager [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1806.153097] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1806.154220] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcf191e-9ec7-4ef1-87a9-e5b95b2fe0ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.164823] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1806.165115] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e6a044d-2317-422e-8901-455db58f1b29 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.173704] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1806.173704] env[62820]: value = "task-1696343" [ 1806.173704] env[62820]: _type = "Task" [ 1806.173704] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.183933] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.260293] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.260293] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5261c134-99f6-6cc4-367f-74650398b2de, 'name': SearchDatastore_Task, 'duration_secs': 0.017485} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.261252] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Successfully updated port: 3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1806.262820] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.263629] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620/8d0e3ef5-55e9-4b4e-9252-4e3e921f4620.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1806.264343] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66e45e5c-49a5-4e63-879c-13b614404e00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.275216] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1806.275216] env[62820]: value = "task-1696344" [ 1806.275216] env[62820]: _type = "Task" [ 1806.275216] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.289898] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696344, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.457046] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "refresh_cache-488900b2-d0c9-4437-9f0c-dfb2ea38cb71" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.457519] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Instance network_info: |[{"id": "609eb18a-aed2-4b6d-bb13-b94371396c84", "address": "fa:16:3e:13:37:7d", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap609eb18a-ae", "ovs_interfaceid": "609eb18a-aed2-4b6d-bb13-b94371396c84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1806.458057] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:37:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '609eb18a-aed2-4b6d-bb13-b94371396c84', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1806.466305] env[62820]: DEBUG oslo.service.loopingcall [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1806.466564] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1806.466802] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1af48e5-34b8-43a9-9f3c-d87a21082a74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.488766] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1806.488766] env[62820]: value = "task-1696345" [ 1806.488766] env[62820]: _type = "Task" [ 1806.488766] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.497998] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696345, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.593958] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.686370] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696343, 'name': PowerOffVM_Task, 'duration_secs': 0.231564} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.686665] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1806.686845] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1806.687144] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30b9715d-3464-486e-b73b-415b963fb9c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.765698] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "refresh_cache-b44f0b18-cc2c-4208-ab54-d4cac8593b4d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.766058] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "refresh_cache-b44f0b18-cc2c-4208-ab54-d4cac8593b4d" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.766280] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.777434] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1806.777680] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1806.777889] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] f78bf828-b9ab-480e-bd58-3dd8587780ea {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1806.781720] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce960f4b-a6f0-4ca6-94f0-ee9eb9a5c003 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.790505] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696344, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.792074] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1806.792074] env[62820]: value = "task-1696347" [ 1806.792074] env[62820]: _type = "Task" [ 1806.792074] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.801455] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696347, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.001589] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696345, 'name': CreateVM_Task, 'duration_secs': 0.501851} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.003845] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1807.004669] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.004821] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.005229] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1807.005485] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f84df12-7de1-4315-a0ec-9b5953c31b4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.010671] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1807.010671] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521aacc4-ac2d-dded-d778-d194a042a2a6" [ 1807.010671] env[62820]: _type = "Task" [ 1807.010671] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.020625] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521aacc4-ac2d-dded-d778-d194a042a2a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.094958] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.127988] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07e1ccd-90bd-481a-8cf2-6bfe046f6d63 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.135560] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57db880-ed46-401c-a1a2-b21bd3244cc9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.168289] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c793cde7-f29c-4464-9239-f7c1d7c02bd1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.176162] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb3e00b-fba0-4807-bdfc-1f48552d6106 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.190101] env[62820]: DEBUG nova.compute.provider_tree [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.288811] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61349} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.288811] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620/8d0e3ef5-55e9-4b4e-9252-4e3e921f4620.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1807.288811] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1807.288811] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-815a5643-ad68-43fe-9276-d5641bbbd226 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.296798] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1807.296798] env[62820]: value = "task-1696348" [ 1807.296798] env[62820]: _type = "Task" [ 1807.296798] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.306137] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1807.308073] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696347, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.314059] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696348, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.483766] env[62820]: DEBUG nova.network.neutron [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Updating instance_info_cache with network_info: [{"id": "3b0e0049-3600-401b-b074-0a891b2829e0", "address": "fa:16:3e:03:42:42", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0e0049-36", "ovs_interfaceid": "3b0e0049-3600-401b-b074-0a891b2829e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.522987] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521aacc4-ac2d-dded-d778-d194a042a2a6, 'name': SearchDatastore_Task, 'duration_secs': 0.043366} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.522987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.522987] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1807.522987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.522987] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.523348] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.523646] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a53655b-5a8a-4202-ac0a-24f2b93be8a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.533136] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.533377] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.534537] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9fd7dd-859c-478d-9c00-83b58d905665 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.540356] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1807.540356] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5202f4fe-0ca5-0782-5387-1fba835b356d" [ 1807.540356] env[62820]: _type = "Task" [ 1807.540356] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.547928] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5202f4fe-0ca5-0782-5387-1fba835b356d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.594027] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.692789] env[62820]: DEBUG nova.scheduler.client.report [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1807.806419] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087609} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.808963] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1807.809292] env[62820]: DEBUG oslo_vmware.api [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.643644} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.809973] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a321186-48a1-47ce-9904-b6aba5a731fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.812315] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1807.812504] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1807.812683] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1807.812854] env[62820]: INFO nova.compute.manager [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1807.813099] env[62820]: DEBUG oslo.service.loopingcall [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.813293] env[62820]: DEBUG nova.compute.manager [-] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1807.813389] env[62820]: DEBUG nova.network.neutron [-] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1807.834738] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620/8d0e3ef5-55e9-4b4e-9252-4e3e921f4620.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.835257] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c4a21df-c6b2-414e-8ed6-62a47ba9745f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.856334] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1807.856334] env[62820]: value = "task-1696349" [ 1807.856334] env[62820]: _type = "Task" [ 1807.856334] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.864787] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696349, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.986382] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "refresh_cache-b44f0b18-cc2c-4208-ab54-d4cac8593b4d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.986775] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Instance network_info: |[{"id": "3b0e0049-3600-401b-b074-0a891b2829e0", "address": "fa:16:3e:03:42:42", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0e0049-36", "ovs_interfaceid": "3b0e0049-3600-401b-b074-0a891b2829e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1807.987343] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:42:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b0e0049-3600-401b-b074-0a891b2829e0', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1807.995000] env[62820]: DEBUG oslo.service.loopingcall [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.995242] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1807.995469] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e48ec85-f609-437e-9cf2-1950e986d6e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.016470] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1808.016470] env[62820]: value = "task-1696350" [ 1808.016470] env[62820]: _type = "Task" [ 1808.016470] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.024516] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696350, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.052276] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5202f4fe-0ca5-0782-5387-1fba835b356d, 'name': SearchDatastore_Task, 'duration_secs': 0.008564} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.052276] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d8bfdc-fd4c-49d7-bb46-800f6b360a13 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.059089] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1808.059089] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52af5e28-0eff-266b-b1e1-baecf6e62132" [ 1808.059089] env[62820]: _type = "Task" [ 1808.059089] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.067478] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52af5e28-0eff-266b-b1e1-baecf6e62132, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.097817] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.179095] env[62820]: DEBUG nova.compute.manager [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Received event network-changed-3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1808.179323] env[62820]: DEBUG nova.compute.manager [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Refreshing instance network info cache due to event network-changed-3b0e0049-3600-401b-b074-0a891b2829e0. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1808.179552] env[62820]: DEBUG oslo_concurrency.lockutils [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] Acquiring lock "refresh_cache-b44f0b18-cc2c-4208-ab54-d4cac8593b4d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.179700] env[62820]: DEBUG oslo_concurrency.lockutils [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] Acquired lock "refresh_cache-b44f0b18-cc2c-4208-ab54-d4cac8593b4d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.179869] env[62820]: DEBUG nova.network.neutron [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Refreshing network info cache for port 3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1808.197552] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.198164] env[62820]: DEBUG nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1808.200992] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.127s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.201239] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.203416] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.218s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.203634] env[62820]: DEBUG nova.objects.instance [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'resources' on Instance uuid 6768101f-8d1d-46be-b0b9-2fdf6cba08da {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.221303] env[62820]: INFO nova.scheduler.client.report [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocations for instance 35b95400-6399-48ae-b7d5-420c33d653dd [ 1808.368906] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696349, 'name': ReconfigVM_Task, 'duration_secs': 0.324642} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.369090] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620/8d0e3ef5-55e9-4b4e-9252-4e3e921f4620.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1808.369714] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-091d1cfa-a557-4854-b2fa-fe8325af524e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.376866] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1808.376866] env[62820]: value = "task-1696351" [ 1808.376866] env[62820]: _type = "Task" [ 1808.376866] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.385741] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696351, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.526925] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696350, 'name': CreateVM_Task, 'duration_secs': 0.47808} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.527194] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1808.527881] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.528061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.528388] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1808.528638] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87248225-616a-40c6-9244-c604970289ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.533923] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1808.533923] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52573037-c3df-7f3c-1d8d-ca1bde03f394" [ 1808.533923] env[62820]: _type = "Task" [ 1808.533923] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.542452] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52573037-c3df-7f3c-1d8d-ca1bde03f394, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.557692] env[62820]: DEBUG nova.network.neutron [-] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.569086] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52af5e28-0eff-266b-b1e1-baecf6e62132, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.569955] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.570247] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 488900b2-d0c9-4437-9f0c-dfb2ea38cb71/488900b2-d0c9-4437-9f0c-dfb2ea38cb71.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1808.570518] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f843c79-9e6a-40aa-8edf-bd018ab7da4b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.578127] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1808.578127] env[62820]: value = "task-1696352" [ 1808.578127] env[62820]: _type = "Task" [ 1808.578127] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.587129] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.596558] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.706285] env[62820]: DEBUG nova.compute.utils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1808.710915] env[62820]: DEBUG nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1808.711114] env[62820]: DEBUG nova.network.neutron [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1808.731337] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd91c320-0a3b-44ea-8c13-04b9f828c6bb tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "35b95400-6399-48ae-b7d5-420c33d653dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.271s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.756677] env[62820]: DEBUG nova.policy [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc838df5682041ed97e19ce34d9f14ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a07ed2a19149b3a58ee43a07e13bba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1808.893926] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696351, 'name': Rename_Task, 'duration_secs': 0.159252} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.894567] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1808.894567] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8be3fd19-4b80-4cc6-be6e-25fe4da29172 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.904927] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1808.904927] env[62820]: value = "task-1696353" [ 1808.904927] env[62820]: _type = "Task" [ 1808.904927] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.916100] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696353, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.938405] env[62820]: DEBUG nova.network.neutron [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Updated VIF entry in instance network info cache for port 3b0e0049-3600-401b-b074-0a891b2829e0. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1808.939292] env[62820]: DEBUG nova.network.neutron [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Updating instance_info_cache with network_info: [{"id": "3b0e0049-3600-401b-b074-0a891b2829e0", "address": "fa:16:3e:03:42:42", "network": {"id": "d1172873-31af-4376-9613-a996b597df96", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-894948617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bbc21e8b7764b9ebf63ec470ef4e1e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0e0049-36", "ovs_interfaceid": "3b0e0049-3600-401b-b074-0a891b2829e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1809.020821] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf148480-d43b-4cae-81a9-c4cfa74b6dd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.030929] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262f8843-9620-4709-88d7-f762bba3ad44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.067153] env[62820]: INFO nova.compute.manager [-] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Took 1.25 seconds to deallocate network for instance. 
[ 1809.068829] env[62820]: DEBUG nova.network.neutron [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Successfully created port: 0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1809.074797] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91e3fd0-b222-44fa-be4b-ac985952b906 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.092027] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52573037-c3df-7f3c-1d8d-ca1bde03f394, 'name': SearchDatastore_Task, 'duration_secs': 0.009354} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.094561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.094894] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1809.095230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.095429] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.095675] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1809.102854] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-080ee8b3-4240-4a50-b625-710bcf07a2a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.106297] env[62820]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8e11f2-8532-4b60-8ace-4780436874fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.117182] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.117454] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527618} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.118041] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 488900b2-d0c9-4437-9f0c-dfb2ea38cb71/488900b2-d0c9-4437-9f0c-dfb2ea38cb71.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1809.118883] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1809.118883] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cd5b0ce-9901-48b2-b7b3-2fd2141aa136 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.130645] env[62820]: DEBUG nova.compute.provider_tree [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.133766] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1809.133993] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1809.135241] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a0235ad-c045-4921-ab58-04c060e927e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.139421] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1809.139421] env[62820]: value = "task-1696354" [ 1809.139421] env[62820]: _type = "Task" [ 1809.139421] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.144849] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1809.144849] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523814cd-b63c-c3ac-bda4-f6522dd6e858" [ 1809.144849] env[62820]: _type = "Task" [ 1809.144849] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.152107] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696354, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.158591] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523814cd-b63c-c3ac-bda4-f6522dd6e858, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.209715] env[62820]: DEBUG nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1809.415500] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696353, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.441287] env[62820]: DEBUG oslo_concurrency.lockutils [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] Releasing lock "refresh_cache-b44f0b18-cc2c-4208-ab54-d4cac8593b4d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.441569] env[62820]: DEBUG nova.compute.manager [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Received event network-vif-deleted-9b1b9e77-da64-4b99-b993-0175cd83f6b7 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1809.441753] env[62820]: INFO nova.compute.manager [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Neutron deleted interface 9b1b9e77-da64-4b99-b993-0175cd83f6b7; detaching it from the instance and deleting it from the info cache [ 1809.441939] env[62820]: DEBUG nova.network.neutron [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1809.590894] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.601626] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.638215] env[62820]: DEBUG nova.scheduler.client.report [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1809.655183] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696354, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.36562} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.659892] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1809.659892] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523814cd-b63c-c3ac-bda4-f6522dd6e858, 'name': SearchDatastore_Task, 'duration_secs': 0.015452} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.661359] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cdf585-b848-4b1a-8c1e-cc0690d3158b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.664810] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac0caf23-4866-428a-b606-83f4a210b095 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.673035] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1809.673035] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520fa367-c13f-befb-97ee-e66cb5ed295b" [ 1809.673035] env[62820]: _type = "Task" [ 1809.673035] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.690984] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 488900b2-d0c9-4437-9f0c-dfb2ea38cb71/488900b2-d0c9-4437-9f0c-dfb2ea38cb71.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.694680] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-319bbe6f-964f-4cae-9119-472cc1432210 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.722064] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1809.722064] env[62820]: value = "task-1696355" [ 1809.722064] env[62820]: _type = "Task" [ 1809.722064] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.722373] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520fa367-c13f-befb-97ee-e66cb5ed295b, 'name': SearchDatastore_Task, 'duration_secs': 0.027955} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.722717] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.723012] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b44f0b18-cc2c-4208-ab54-d4cac8593b4d/b44f0b18-cc2c-4208-ab54-d4cac8593b4d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1809.726640] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82cf3b19-55bf-47f2-8230-17779c9b877d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.736796] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696355, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.738500] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1809.738500] env[62820]: value = "task-1696356" [ 1809.738500] env[62820]: _type = "Task" [ 1809.738500] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.748922] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.920380] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696353, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.945292] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-600be91d-0706-40bd-ae8b-6d80edc42d73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.956978] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e39bab-56a5-4610-84c3-4dc48ce389e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.001119] env[62820]: DEBUG nova.compute.manager [req-eb94c6b3-5748-4f49-ba18-d14b50820d23 req-d585cd5d-6f6d-444e-88c5-4c6f9150cc80 service nova] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Detach interface failed, port_id=9b1b9e77-da64-4b99-b993-0175cd83f6b7, reason: Instance f78bf828-b9ab-480e-bd58-3dd8587780ea could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1810.101070] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.144218] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.941s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.146774] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.237s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.147107] env[62820]: DEBUG nova.objects.instance [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'resources' on Instance uuid a495b540-806d-4cd8-b340-86fe937867cd {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1810.168030] env[62820]: INFO nova.scheduler.client.report [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted allocations for instance 6768101f-8d1d-46be-b0b9-2fdf6cba08da [ 1810.219449] env[62820]: DEBUG nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1810.235320] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.251518] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.253867] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1810.254120] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1810.254282] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1810.254470] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1810.254618] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1810.254766] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1810.254975] 
env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1810.255223] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1810.255411] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1810.255585] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1810.255816] env[62820]: DEBUG nova.virt.hardware [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1810.257049] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1b34e5-d89e-49b7-978c-6366c44002c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.266542] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f9be8c-655b-4c63-9b97-a9aea7fca2fe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.419974] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696353, 'name': PowerOnVM_Task, 'duration_secs': 1.033352} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.421237] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.421405] env[62820]: INFO nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Took 9.50 seconds to spawn the instance on the hypervisor. 
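
The recurring "Waiting for the task: (returnval){ ... }", "progress is N%", and "completed successfully ... duration_secs" entries in this section are emitted while oslo.vmware polls a vCenter task (PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task) until it finishes. A minimal sketch of that poll-until-done pattern, using a hypothetical get_task_info() callable as a stand-in for the real vSphere TaskInfo lookup (this is an illustration under those assumptions, not the actual oslo.vmware implementation):

    import time

    # Hypothetical poll loop mirroring the "progress is N%" / "completed
    # successfully" DEBUG lines above. get_task_info() is a stand-in that
    # returns something like {'state': 'running', 'progress': 66}.
    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        start = time.monotonic()
        while time.monotonic() - start < timeout:
            info = get_task_info()
            if info['state'] == 'success':
                # the log records duration_secs when the task completes
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # corresponds to the periodic "... progress is N%" entries
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete in time')
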
[ 1810.421620] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1810.422619] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109f1705-496e-4cee-8bb6-47b64e5ba9fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.612128] env[62820]: DEBUG oslo_vmware.api [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696342, 'name': ReconfigVM_Task, 'duration_secs': 5.966498} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.612128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.612128] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Reconfigured VM to detach interface {{(pid=62820) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1810.621582] env[62820]: DEBUG nova.compute.manager [req-162f15f3-97c1-4b10-ac44-0cb71a1d521c req-5ab4e29f-da05-4e45-a27d-704e829743ca service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Received event network-vif-plugged-0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1810.621582] env[62820]: DEBUG oslo_concurrency.lockutils [req-162f15f3-97c1-4b10-ac44-0cb71a1d521c req-5ab4e29f-da05-4e45-a27d-704e829743ca service nova] Acquiring lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.621582] env[62820]: DEBUG oslo_concurrency.lockutils [req-162f15f3-97c1-4b10-ac44-0cb71a1d521c req-5ab4e29f-da05-4e45-a27d-704e829743ca service nova] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.621582] env[62820]: DEBUG oslo_concurrency.lockutils [req-162f15f3-97c1-4b10-ac44-0cb71a1d521c req-5ab4e29f-da05-4e45-a27d-704e829743ca service nova] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.621582] env[62820]: DEBUG nova.compute.manager [req-162f15f3-97c1-4b10-ac44-0cb71a1d521c req-5ab4e29f-da05-4e45-a27d-704e829743ca service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] 
No waiting events found dispatching network-vif-plugged-0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1810.621582] env[62820]: WARNING nova.compute.manager [req-162f15f3-97c1-4b10-ac44-0cb71a1d521c req-5ab4e29f-da05-4e45-a27d-704e829743ca service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Received unexpected event network-vif-plugged-0e216ee5-769e-4393-89c5-540a8cb786a0 for instance with vm_state building and task_state spawning. [ 1810.678540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4688c770-ef56-42a1-9218-a39ed0d3f433 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "6768101f-8d1d-46be-b0b9-2fdf6cba08da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.154s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.745580] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696355, 'name': ReconfigVM_Task, 'duration_secs': 0.745529} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.757468] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 488900b2-d0c9-4437-9f0c-dfb2ea38cb71/488900b2-d0c9-4437-9f0c-dfb2ea38cb71.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.759211] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94f99177-a094-4f53-9dec-cf4df3fa163d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.770312] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696356, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.776442] env[62820]: DEBUG nova.network.neutron [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Successfully updated port: 0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1810.778187] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1810.778187] env[62820]: value = "task-1696357" [ 1810.778187] env[62820]: _type = "Task" [ 1810.778187] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.805020] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696357, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.949474] env[62820]: INFO nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Took 16.38 seconds to build instance. [ 1811.075041] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b587070-dd87-4035-8ac5-1b0b49fe3545 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.083833] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9e4b56-c954-415d-9eff-9b52419da66b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.118205] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e41b22-2c5a-4194-ae2f-a7a360079713 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.127335] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33f646b-7a85-4bf3-8a68-b8d30614dcc6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.145968] env[62820]: DEBUG nova.compute.provider_tree [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.258670] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696356, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.517181} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.259037] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] b44f0b18-cc2c-4208-ab54-d4cac8593b4d/b44f0b18-cc2c-4208-ab54-d4cac8593b4d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1811.259280] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1811.259544] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d35c3dd4-d245-4e92-9ba8-c4c706e0f88f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.268663] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1811.268663] env[62820]: value = "task-1696358" [ 1811.268663] env[62820]: _type = "Task" [ 1811.268663] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.277735] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696358, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.279709] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "refresh_cache-986d44bb-3d5c-4d3e-a569-45cb1da5c88e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.279878] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "refresh_cache-986d44bb-3d5c-4d3e-a569-45cb1da5c88e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.279993] env[62820]: DEBUG nova.network.neutron [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1811.290872] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696357, 'name': Rename_Task, 'duration_secs': 0.217562} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.292255] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1811.292582] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aba5285c-8541-46d6-894e-7b80a2554654 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.301040] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1811.301040] env[62820]: value = "task-1696359" [ 1811.301040] env[62820]: _type = "Task" [ 1811.301040] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.310038] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.452188] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.898s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.649387] env[62820]: DEBUG nova.scheduler.client.report [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1811.778970] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696358, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.811911] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696359, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.820749] env[62820]: DEBUG nova.network.neutron [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1812.006311] env[62820]: DEBUG nova.network.neutron [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Updating instance_info_cache with network_info: [{"id": "0e216ee5-769e-4393-89c5-540a8cb786a0", "address": "fa:16:3e:93:f5:0d", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e216ee5-76", "ovs_interfaceid": "0e216ee5-769e-4393-89c5-540a8cb786a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.154287] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.159049] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.899s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.159049] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.159049] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1812.159049] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.568s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.159049] env[62820]: 
DEBUG nova.objects.instance [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid f78bf828-b9ab-480e-bd58-3dd8587780ea {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1812.160370] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ef7b28-9831-422d-9bd6-e68df2b6bbfe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.170585] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1540cb-660c-429d-8396-13bf476c2119 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.187144] env[62820]: INFO nova.scheduler.client.report [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted allocations for instance a495b540-806d-4cd8-b340-86fe937867cd [ 1812.188691] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62d053f-7a04-48a4-bb54-20b9634e3ed5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.201904] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cf3003-b460-4014-811d-d91195a5a200 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.236027] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179467MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1812.236027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.237777] env[62820]: DEBUG nova.compute.manager [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1812.279836] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.963936} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.280201] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1812.281033] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d45e019-1694-443b-a2ec-4ed601a4214d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.284348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.284446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquired lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.284648] env[62820]: DEBUG nova.network.neutron [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1812.307457] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] b44f0b18-cc2c-4208-ab54-d4cac8593b4d/b44f0b18-cc2c-4208-ab54-d4cac8593b4d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1812.307990] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5114002-6fd9-47e5-84a9-f25b00b8940b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.333119] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696359, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.334453] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1812.334453] env[62820]: value = "task-1696360" [ 1812.334453] env[62820]: _type = "Task" [ 1812.334453] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.342901] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696360, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.379078] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.379381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.379620] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.379845] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.380165] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.382995] env[62820]: INFO nova.compute.manager [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Terminating instance [ 1812.509581] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "refresh_cache-986d44bb-3d5c-4d3e-a569-45cb1da5c88e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.509987] env[62820]: DEBUG nova.compute.manager [None 
req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Instance network_info: |[{"id": "0e216ee5-769e-4393-89c5-540a8cb786a0", "address": "fa:16:3e:93:f5:0d", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e216ee5-76", "ovs_interfaceid": "0e216ee5-769e-4393-89c5-540a8cb786a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1812.510490] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:f5:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e216ee5-769e-4393-89c5-540a8cb786a0', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1812.518354] env[62820]: DEBUG oslo.service.loopingcall [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1812.518599] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1812.518860] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9e496a4-f107-4341-aedf-c47c37d98fa9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.540405] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1812.540405] env[62820]: value = "task-1696361" [ 1812.540405] env[62820]: _type = "Task" [ 1812.540405] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.550919] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696361, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.651387] env[62820]: DEBUG nova.compute.manager [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Received event network-changed-0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1812.651723] env[62820]: DEBUG nova.compute.manager [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Refreshing instance network info cache due to event network-changed-0e216ee5-769e-4393-89c5-540a8cb786a0. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1812.652125] env[62820]: DEBUG oslo_concurrency.lockutils [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] Acquiring lock "refresh_cache-986d44bb-3d5c-4d3e-a569-45cb1da5c88e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.652410] env[62820]: DEBUG oslo_concurrency.lockutils [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] Acquired lock "refresh_cache-986d44bb-3d5c-4d3e-a569-45cb1da5c88e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.652618] env[62820]: DEBUG nova.network.neutron [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Refreshing network info cache for port 0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1812.700589] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9b1973c3-e3a2-4ac2-b639-bccdaa7d659b tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "a495b540-806d-4cd8-b340-86fe937867cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.789s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.755963] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.820889] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696359, 'name': PowerOnVM_Task, 'duration_secs': 1.438696} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.823328] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1812.823560] env[62820]: INFO nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Took 9.34 seconds to spawn the instance on the hypervisor. [ 1812.823746] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1812.824690] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f17138f-f2bb-40e3-b980-b3536ed13512 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.850603] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696360, 'name': ReconfigVM_Task, 'duration_secs': 0.331132} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.851505] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Reconfigured VM instance instance-0000006a to attach disk [datastore1] b44f0b18-cc2c-4208-ab54-d4cac8593b4d/b44f0b18-cc2c-4208-ab54-d4cac8593b4d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1812.851938] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e27206c-73b7-4fb8-87b9-60d817053135 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.860452] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1812.860452] env[62820]: value = "task-1696362" [ 1812.860452] env[62820]: _type = "Task" [ 1812.860452] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.872105] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696362, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.888852] env[62820]: DEBUG nova.compute.manager [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1812.888852] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1812.889986] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53d2a1f-21b4-4eac-8126-001d0afe9058 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.903995] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1812.903995] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33c39247-2ad0-477c-9ffc-d7afbb110248 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.915010] env[62820]: DEBUG oslo_vmware.api [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1812.915010] env[62820]: value = "task-1696363" [ 1812.915010] env[62820]: _type = "Task" [ 1812.915010] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.925559] env[62820]: DEBUG oslo_vmware.api [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696363, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.975625] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8516e9e-8401-4ece-a422-2bcb85c491a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.986849] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4136963c-a613-41ed-9ec5-a93b24721054 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.026967] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd103d1-7c5f-4510-9dbf-a99f614bb6c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.036394] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9068b075-3bb0-4179-8cd8-4110653745db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.053072] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696361, 'name': CreateVM_Task, 'duration_secs': 0.489706} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.066220] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1813.066220] env[62820]: DEBUG nova.compute.provider_tree [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.066220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.066220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.066220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1813.066430] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-219fbad2-0e46-4902-8aff-b9d34bc220c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1813.073221] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1813.073221] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52faa0f4-17bc-83bc-1b7d-24f373dcd172" [ 1813.073221] env[62820]: _type = "Task" [ 1813.073221] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.085952] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52faa0f4-17bc-83bc-1b7d-24f373dcd172, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.145953] env[62820]: INFO nova.network.neutron [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Port 5120b8e3-6688-4386-9c99-ee01add07316 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1813.147304] env[62820]: DEBUG nova.network.neutron [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [{"id": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "address": "fa:16:3e:b5:da:cb", "network": {"id": "26851e2e-dece-4dce-bec8-e64227003b80", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-945912271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d4dc6b875b5420d87321f79b04bde9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapced8fea2-d4", "ovs_interfaceid": "ced8fea2-d4eb-4f3b-b2be-7974608dd130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.353089] env[62820]: INFO nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Took 18.69 seconds to build instance. 
[ 1813.375244] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696362, 'name': Rename_Task, 'duration_secs': 0.16537} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.376089] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1813.376342] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6834f5bc-28da-45c9-a76c-37df586cb7c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.383792] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1813.383792] env[62820]: value = "task-1696364" [ 1813.383792] env[62820]: _type = "Task" [ 1813.383792] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.390998] env[62820]: DEBUG nova.network.neutron [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Updated VIF entry in instance network info cache for port 0e216ee5-769e-4393-89c5-540a8cb786a0. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1813.391523] env[62820]: DEBUG nova.network.neutron [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Updating instance_info_cache with network_info: [{"id": "0e216ee5-769e-4393-89c5-540a8cb786a0", "address": "fa:16:3e:93:f5:0d", "network": {"id": "572100a7-38f7-4870-baa4-f9827c290b35", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1874620608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a07ed2a19149b3a58ee43a07e13bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e216ee5-76", "ovs_interfaceid": "0e216ee5-769e-4393-89c5-540a8cb786a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.400334] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.434272] env[62820]: DEBUG oslo_vmware.api [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696363, 'name': PowerOffVM_Task, 'duration_secs': 0.211756} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.434633] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1813.435017] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1813.435322] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78deb4f2-58e6-41d8-9891-bdd69b64cfa0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.523657] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1813.523905] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1813.524142] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleting the datastore file [datastore1] 4ea2be66-06b4-4519-82b0-c2b1df329a5a {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1813.524417] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-648ce2bf-9654-4ac9-875c-3f212e119e88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.531677] env[62820]: DEBUG oslo_vmware.api [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1813.531677] env[62820]: value = "task-1696366" [ 1813.531677] env[62820]: _type = "Task" [ 1813.531677] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.540178] env[62820]: DEBUG oslo_vmware.api [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696366, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.568127] env[62820]: DEBUG nova.scheduler.client.report [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1813.588321] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52faa0f4-17bc-83bc-1b7d-24f373dcd172, 'name': SearchDatastore_Task, 'duration_secs': 0.017168} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.589553] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.589890] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1813.590256] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.590480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.590758] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1813.591484] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2dedc5ab-1197-4878-9f2d-454cad96a044 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.602656] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1813.602906] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1813.603996] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fdc383a-23b0-4cfb-9132-9ec0c30617eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.612022] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1813.612022] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529ef80c-4bc2-b537-4cbc-2f3c7633b20d" [ 1813.612022] env[62820]: _type = "Task" [ 1813.612022] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.624096] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529ef80c-4bc2-b537-4cbc-2f3c7633b20d, 'name': SearchDatastore_Task} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.625647] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ad0ad6a-67ae-4033-a753-be8831080d14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.633576] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1813.633576] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b7adeb-b18a-0539-823e-ad648f59e35d" [ 1813.633576] env[62820]: _type = "Task" [ 1813.633576] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.644810] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b7adeb-b18a-0539-823e-ad648f59e35d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.649451] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Releasing lock "refresh_cache-4ea2be66-06b4-4519-82b0-c2b1df329a5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.856702] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.206s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.896688] env[62820]: DEBUG oslo_concurrency.lockutils [req-a7ba3a42-268c-4bca-bc2f-2b8ee3a45afc req-c3a35b8a-de4f-4f1e-9a7c-9d75e5757588 service nova] Releasing lock "refresh_cache-986d44bb-3d5c-4d3e-a569-45cb1da5c88e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.897336] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696364, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.042218] env[62820]: DEBUG oslo_vmware.api [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166707} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.042586] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1814.042786] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1814.043020] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1814.043246] env[62820]: INFO nova.compute.manager [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1814.043533] env[62820]: DEBUG oslo.service.loopingcall [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1814.043795] env[62820]: DEBUG nova.compute.manager [-] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1814.043935] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1814.074716] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.081056] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.846s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.105711] env[62820]: INFO nova.scheduler.client.report [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance f78bf828-b9ab-480e-bd58-3dd8587780ea [ 1814.149985] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b7adeb-b18a-0539-823e-ad648f59e35d, 'name': SearchDatastore_Task, 'duration_secs': 0.011096} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.150363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.150717] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 986d44bb-3d5c-4d3e-a569-45cb1da5c88e/986d44bb-3d5c-4d3e-a569-45cb1da5c88e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1814.151028] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9000a882-11df-4d5c-9d45-26a3b5893e2b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.155113] env[62820]: DEBUG oslo_concurrency.lockutils [None req-268952c4-684f-4525-8f3c-d91a9e0b0c98 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "interface-4ea2be66-06b4-4519-82b0-c2b1df329a5a-5120b8e3-6688-4386-9c99-ee01add07316" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.199s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.162684] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1814.162684] env[62820]: value = "task-1696367" [ 1814.162684] env[62820]: _type = "Task" [ 1814.162684] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.180077] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696367, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.405243] env[62820]: DEBUG oslo_vmware.api [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696364, 'name': PowerOnVM_Task, 'duration_secs': 0.517394} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.405243] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1814.405243] env[62820]: INFO nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Took 8.57 seconds to spawn the instance on the hypervisor. [ 1814.405243] env[62820]: DEBUG nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1814.410665] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517f1812-a4bd-4ded-9b4b-976ec2165c83 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.612992] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6ffc6848-5b10-4815-82c7-5f7a5eeb9cf3 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "f78bf828-b9ab-480e-bd58-3dd8587780ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.966s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.678484] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696367, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.843976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.844435] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.904074] env[62820]: DEBUG nova.compute.manager [req-3ceab693-d1c3-47f8-934b-d53211899ca5 req-374647b5-fafe-4557-966c-da1d7dcf758b service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Received event network-vif-deleted-ced8fea2-d4eb-4f3b-b2be-7974608dd130 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1814.904369] env[62820]: INFO nova.compute.manager [req-3ceab693-d1c3-47f8-934b-d53211899ca5 req-374647b5-fafe-4557-966c-da1d7dcf758b service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Neutron deleted interface ced8fea2-d4eb-4f3b-b2be-7974608dd130; detaching it from the instance and deleting it from the info cache [ 1814.904633] env[62820]: DEBUG nova.network.neutron [req-3ceab693-d1c3-47f8-934b-d53211899ca5 req-374647b5-fafe-4557-966c-da1d7dcf758b service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.928810] env[62820]: INFO nova.compute.manager [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Took 20.23 seconds to build instance. [ 1815.098644] env[62820]: INFO nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating resource usage from migration 70a6c233-a354-40ad-9625-8975d924b672 [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance eafe98b7-a67d-4bab-bfc0-8367ae069d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0dd0e112-7a7c-4b37-8938-bb98aab2d485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3228cd34-2144-425a-aca6-400cb0991e43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 6da857ea-f213-4b17-9e9f-d74d1ea649c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 03b0abc8-dd32-4cf9-8750-d64b8a66695e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 9c0d9676-9db9-4be2-a8e6-84bd816234aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8486f52-998d-4308-813a-9c651e2eb093 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4ea2be66-06b4-4519-82b0-c2b1df329a5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance efe5ffe8-cd29-467d-85ad-d9e7d4eb9203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 392d8bca-2d8d-42c3-ba14-fc1387c75405 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8803178-7fa3-42ea-824c-901063673062 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123192] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 488900b2-d0c9-4437-9f0c-dfb2ea38cb71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123665] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b44f0b18-cc2c-4208-ab54-d4cac8593b4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123665] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 986d44bb-3d5c-4d3e-a569-45cb1da5c88e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.123665] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Migration 70a6c233-a354-40ad-9625-8975d924b672 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1815.123665] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1815.180032] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696367, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569556} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.180032] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 986d44bb-3d5c-4d3e-a569-45cb1da5c88e/986d44bb-3d5c-4d3e-a569-45cb1da5c88e.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1815.180032] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1815.180032] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f1b0864-8998-44e3-ad28-35fa37775a2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.188180] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1815.188180] env[62820]: value = "task-1696368" [ 1815.188180] env[62820]: _type = "Task" [ 1815.188180] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.197687] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696368, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.346441] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1815.380959] env[62820]: DEBUG nova.network.neutron [-] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.407735] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9920f687-6b4f-4dfb-b53e-b57321273be2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.417821] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016be8d3-d213-49af-a9b7-9442f5b038fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.430788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-68d33252-e3ba-4f49-90de-ce24aec1355d tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.746s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.455636] env[62820]: DEBUG nova.compute.manager [req-3ceab693-d1c3-47f8-934b-d53211899ca5 req-374647b5-fafe-4557-966c-da1d7dcf758b service nova] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Detach interface failed, port_id=ced8fea2-d4eb-4f3b-b2be-7974608dd130, reason: Instance 4ea2be66-06b4-4519-82b0-c2b1df329a5a could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1815.556295] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.556589] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.556825] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.557017] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.557193] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.562326] env[62820]: INFO nova.compute.manager [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Terminating instance [ 1815.628041] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1815.628526] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1815.628692] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1815.702895] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696368, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.194187} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.703219] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1815.704022] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bd8ac4-ea30-4489-8792-c269c2335f8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.729123] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 986d44bb-3d5c-4d3e-a569-45cb1da5c88e/986d44bb-3d5c-4d3e-a569-45cb1da5c88e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1815.732251] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40ff275f-c041-4379-8a56-aa96e7d26928 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.760592] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1815.760592] env[62820]: value = "task-1696369" [ 1815.760592] env[62820]: _type = "Task" [ 1815.760592] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.776014] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696369, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.866417] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.886048] env[62820]: INFO nova.compute.manager [-] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Took 1.84 seconds to deallocate network for instance. 
[ 1815.930173] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68dfb694-667f-48ca-ac53-a063f37c7f56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.939440] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c48117-b26a-4e8a-b9d5-ba2a5c26e902 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.973908] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9c6f96-c629-4b16-818b-b8ef6d9424df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.982347] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fba9f58-c4c7-48be-bcb4-2f6665dc3de7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.996180] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.067446] env[62820]: DEBUG nova.compute.manager [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1816.067761] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1816.068945] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a901e90d-d9c9-477b-971a-e904542ad0ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.077639] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1816.077639] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87e2b50b-262c-42e3-82bd-3b1362387e21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.083855] env[62820]: DEBUG oslo_vmware.api [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1816.083855] env[62820]: value = "task-1696370" [ 1816.083855] env[62820]: _type = "Task" [ 1816.083855] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.092570] env[62820]: DEBUG oslo_vmware.api [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.256974] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.257319] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.257541] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.257728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.257901] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.260217] env[62820]: INFO nova.compute.manager [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Terminating instance [ 1816.272495] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696369, 'name': ReconfigVM_Task, 'duration_secs': 0.329963} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.273411] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 986d44bb-3d5c-4d3e-a569-45cb1da5c88e/986d44bb-3d5c-4d3e-a569-45cb1da5c88e.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1816.274095] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df28c540-a7db-42f4-ac3b-6d14e87a9a04 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.281978] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1816.281978] env[62820]: value = "task-1696371" [ 1816.281978] env[62820]: _type = "Task" [ 1816.281978] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.292033] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696371, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.392663] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.499495] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1816.597522] env[62820]: DEBUG oslo_vmware.api [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696370, 'name': PowerOffVM_Task, 'duration_secs': 0.237905} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.597923] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1816.598196] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1816.598541] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90c94c6a-ce40-44ad-b2be-d3118424f083 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.688036] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1816.688036] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1816.688036] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] efe5ffe8-cd29-467d-85ad-d9e7d4eb9203 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1816.688218] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a08c2587-19fb-4356-81fb-b0cf6cee6f0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.697958] env[62820]: DEBUG oslo_vmware.api [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1816.697958] env[62820]: value = "task-1696373" [ 1816.697958] env[62820]: _type = "Task" [ 1816.697958] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.708844] env[62820]: DEBUG oslo_vmware.api [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.767963] env[62820]: DEBUG nova.compute.manager [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1816.768265] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1816.769227] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cc9381-60d4-457a-bd5c-4a62c749c03b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.777917] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1816.778208] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8750cf9e-d2d1-4153-938d-3aec3f521e60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.787999] env[62820]: DEBUG oslo_vmware.api [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1816.787999] env[62820]: value = "task-1696374" [ 1816.787999] env[62820]: _type = "Task" [ 1816.787999] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.795017] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696371, 'name': Rename_Task, 'duration_secs': 0.172945} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.795678] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1816.796667] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c0c1546-515c-4ef2-85b1-4947723b61ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.803055] env[62820]: DEBUG oslo_vmware.api [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696374, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.804121] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1816.804121] env[62820]: value = "task-1696375" [ 1816.804121] env[62820]: _type = "Task" [ 1816.804121] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.812729] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696375, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.005218] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1817.005218] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.924s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.005218] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.249s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.209137] env[62820]: DEBUG oslo_vmware.api [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291521} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.209137] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1817.209373] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1817.209534] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1817.209683] env[62820]: INFO nova.compute.manager [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1817.210131] env[62820]: DEBUG oslo.service.loopingcall [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.210131] env[62820]: DEBUG nova.compute.manager [-] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1817.210257] env[62820]: DEBUG nova.network.neutron [-] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1817.299843] env[62820]: DEBUG oslo_vmware.api [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696374, 'name': PowerOffVM_Task, 'duration_secs': 0.2856} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.300279] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1817.300480] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1817.300750] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c7fea3d-f861-413c-af7a-b22ec3ac93e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.313930] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696375, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.471097] env[62820]: DEBUG nova.compute.manager [req-3b457c7e-978a-4ed6-bfbb-8be6c4e742fa req-3e4fb0ec-b6a5-4d83-847e-857a59d4313e service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Received event network-vif-deleted-2d669a0f-9bd6-4e38-8692-b076be881645 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1817.471900] env[62820]: INFO nova.compute.manager [req-3b457c7e-978a-4ed6-bfbb-8be6c4e742fa req-3e4fb0ec-b6a5-4d83-847e-857a59d4313e service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Neutron deleted interface 2d669a0f-9bd6-4e38-8692-b076be881645; detaching it from the instance and deleting it from the info cache [ 1817.471900] env[62820]: DEBUG nova.network.neutron [req-3b457c7e-978a-4ed6-bfbb-8be6c4e742fa req-3e4fb0ec-b6a5-4d83-847e-857a59d4313e service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.489050] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1817.489364] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1817.489618] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleting the datastore file [datastore1] 
8d0e3ef5-55e9-4b4e-9252-4e3e921f4620 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1817.490228] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6bf7b6fd-9462-4b14-ba15-958c893d36ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.498470] env[62820]: DEBUG oslo_vmware.api [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1817.498470] env[62820]: value = "task-1696377" [ 1817.498470] env[62820]: _type = "Task" [ 1817.498470] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.507576] env[62820]: DEBUG oslo_vmware.api [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696377, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.510029] env[62820]: INFO nova.compute.claims [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1817.815826] env[62820]: DEBUG oslo_vmware.api [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696375, 'name': PowerOnVM_Task, 'duration_secs': 0.963524} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.816136] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1817.816348] env[62820]: INFO nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Took 7.60 seconds to spawn the instance on the hypervisor. 
[ 1817.816531] env[62820]: DEBUG nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1817.817321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f319180-719d-4120-bb31-759abb29b094 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.945808] env[62820]: DEBUG nova.network.neutron [-] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.974695] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8e22936-0c88-4d00-95aa-68bf88a1d936 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.987124] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d8e871-f278-4a5b-9101-2efb8d87388f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.008144] env[62820]: DEBUG oslo_vmware.api [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322231} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.008447] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1818.008667] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1818.008815] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1818.009028] env[62820]: INFO nova.compute.manager [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Took 1.24 seconds to destroy the instance on the hypervisor. 
[ 1818.009290] env[62820]: DEBUG oslo.service.loopingcall [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.009493] env[62820]: DEBUG nova.compute.manager [-] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1818.009612] env[62820]: DEBUG nova.network.neutron [-] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1818.023881] env[62820]: INFO nova.compute.resource_tracker [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating resource usage from migration 70a6c233-a354-40ad-9625-8975d924b672 [ 1818.027034] env[62820]: DEBUG nova.compute.manager [req-3b457c7e-978a-4ed6-bfbb-8be6c4e742fa req-3e4fb0ec-b6a5-4d83-847e-857a59d4313e service nova] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Detach interface failed, port_id=2d669a0f-9bd6-4e38-8692-b076be881645, reason: Instance efe5ffe8-cd29-467d-85ad-d9e7d4eb9203 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1818.274408] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735daf87-31ff-434c-894b-b14c2a48e912 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.283578] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcda046-c998-442d-9633-9d25532c45e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.313839] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01616529-caa6-465d-a617-969d2fc1126e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.322567] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc4165f-8e69-40f0-a8e4-85086fc769ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.339259] env[62820]: DEBUG nova.compute.provider_tree [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.343270] env[62820]: INFO nova.compute.manager [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Took 17.86 seconds to build instance. 
[ 1818.448330] env[62820]: INFO nova.compute.manager [-] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Took 1.24 seconds to deallocate network for instance. [ 1818.764195] env[62820]: DEBUG nova.network.neutron [-] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.843583] env[62820]: DEBUG nova.scheduler.client.report [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1818.847403] env[62820]: DEBUG oslo_concurrency.lockutils [None req-78ae150f-dde5-40be-8128-4e3159cbfd26 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.386s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.956116] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.220881] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.221187] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.221433] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.221623] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 
tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.221795] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.223960] env[62820]: INFO nova.compute.manager [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Terminating instance [ 1819.268524] env[62820]: INFO nova.compute.manager [-] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Took 1.26 seconds to deallocate network for instance. [ 1819.349816] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.345s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.350150] env[62820]: INFO nova.compute.manager [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Migrating [ 1819.358758] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.493s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.360267] env[62820]: INFO nova.compute.claims [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1819.497036] env[62820]: DEBUG nova.compute.manager [req-4c585a9e-d1fa-43c3-a743-334c98a3ab76 req-01a98478-2582-4e2c-b16f-7a62c052bb16 service nova] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Received event network-vif-deleted-8dbeff3a-dcda-4f84-b5b3-b12c75219348 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1819.681048] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1819.681368] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task 
ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1819.727716] env[62820]: DEBUG nova.compute.manager [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1819.727937] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1819.728810] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48999069-e492-492c-9ffd-8166bc5fc3ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.737121] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1819.737359] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cbbf186-a968-40b1-8eac-762501ae6b80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.744747] env[62820]: DEBUG oslo_vmware.api [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1819.744747] env[62820]: value = "task-1696378" [ 1819.744747] env[62820]: _type = "Task" [ 1819.744747] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.755313] env[62820]: DEBUG oslo_vmware.api [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696378, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.775179] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.872821] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.872981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.873129] env[62820]: DEBUG nova.network.neutron [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1820.254771] env[62820]: DEBUG oslo_vmware.api [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696378, 'name': PowerOffVM_Task, 'duration_secs': 0.189562} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.254771] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1820.254991] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1820.255188] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1a4185c-85a2-4d68-b2f8-213036e428bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.489190] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1820.489282] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1820.490228] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleting the datastore file [datastore1] 986d44bb-3d5c-4d3e-a569-45cb1da5c88e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.491992] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f07bec08-46db-47cf-8a77-9357cf833edb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.501238] env[62820]: DEBUG oslo_vmware.api [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for the task: (returnval){ [ 1820.501238] env[62820]: value = "task-1696380" [ 1820.501238] env[62820]: _type = "Task" [ 1820.501238] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.510336] env[62820]: DEBUG oslo_vmware.api [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696380, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.627998] env[62820]: DEBUG nova.network.neutron [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.632273] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d57852-d44d-4bad-bb34-49b2046b5a89 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.639159] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc89dbd-c217-4359-8154-3aa26f9dcc21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.671949] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fff7651-7e8a-443e-b2cb-fa2a82aadff1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.679802] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca47c7b1-0624-49e7-b406-e347eeac6655 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.695150] env[62820]: DEBUG nova.compute.provider_tree [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1821.015775] env[62820]: DEBUG oslo_vmware.api [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Task: {'id': task-1696380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172157} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.016194] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.016462] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.016711] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.016987] env[62820]: INFO nova.compute.manager [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1821.017444] env[62820]: DEBUG oslo.service.loopingcall [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.017714] env[62820]: DEBUG nova.compute.manager [-] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1821.017844] env[62820]: DEBUG nova.network.neutron [-] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.135212] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.198192] env[62820]: DEBUG nova.scheduler.client.report [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1821.352326] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.352608] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.521903] env[62820]: DEBUG nova.compute.manager [req-3f44252a-b509-414c-b414-768cd07c906b req-a28fc6b2-a70c-4646-8d19-2958abca2d19 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Received event network-vif-deleted-0e216ee5-769e-4393-89c5-540a8cb786a0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1821.522027] env[62820]: INFO nova.compute.manager [req-3f44252a-b509-414c-b414-768cd07c906b req-a28fc6b2-a70c-4646-8d19-2958abca2d19 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Neutron deleted interface 0e216ee5-769e-4393-89c5-540a8cb786a0; detaching it from the instance and deleting it from the info cache [ 1821.522262] env[62820]: DEBUG nova.network.neutron [req-3f44252a-b509-414c-b414-768cd07c906b req-a28fc6b2-a70c-4646-8d19-2958abca2d19 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Updating instance_info_cache with network_info: [] {{(pid=62820) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.703256] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.703838] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1821.706497] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.314s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.706740] env[62820]: DEBUG nova.objects.instance [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'resources' on Instance uuid 4ea2be66-06b4-4519-82b0-c2b1df329a5a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1821.855322] env[62820]: INFO nova.compute.manager [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Detaching volume 1302a61c-1765-4676-9304-76b004523986 [ 1821.894125] env[62820]: INFO nova.virt.block_device [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Attempting to driver detach volume 1302a61c-1765-4676-9304-76b004523986 from mountpoint /dev/sdb [ 1821.894545] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1821.894844] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353648', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'name': 'volume-1302a61c-1765-4676-9304-76b004523986', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8486f52-998d-4308-813a-9c651e2eb093', 'attached_at': '', 'detached_at': '', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'serial': '1302a61c-1765-4676-9304-76b004523986'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1821.896064] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c89ee8e-cafa-4f8c-bd62-67eb4743c452 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.919773] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94768ef-652d-4422-9e0a-a332c1f578b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.928802] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf33c1e0-b6e5-4ce6-b1e5-52429bbca699 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.949244] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995f1f9f-4588-4adc-9f01-f5f89055cee9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.964198] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] The volume has not been displaced from its original location: [datastore1] volume-1302a61c-1765-4676-9304-76b004523986/volume-1302a61c-1765-4676-9304-76b004523986.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1821.969260] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfiguring VM instance instance-0000003b to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1821.969528] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcf6d2f3-05c2-49f7-8493-99f5df3d80b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.984166] env[62820]: DEBUG nova.network.neutron [-] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.987041] env[62820]: DEBUG oslo_vmware.api [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1821.987041] env[62820]: value = "task-1696381" [ 1821.987041] env[62820]: _type = "Task" [ 1821.987041] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.994694] env[62820]: DEBUG oslo_vmware.api [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696381, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.024859] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f9031fb-9311-4522-81f4-914f30164906 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.035331] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852d1c85-9ea1-47a3-a475-87b9024e49d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.068531] env[62820]: DEBUG nova.compute.manager [req-3f44252a-b509-414c-b414-768cd07c906b req-a28fc6b2-a70c-4646-8d19-2958abca2d19 service nova] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Detach interface failed, port_id=0e216ee5-769e-4393-89c5-540a8cb786a0, reason: Instance 986d44bb-3d5c-4d3e-a569-45cb1da5c88e could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1822.210876] env[62820]: DEBUG nova.compute.utils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1822.216171] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1822.216565] env[62820]: DEBUG nova.network.neutron [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1822.267489] env[62820]: DEBUG nova.policy [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815f8967d40e4943a66da6866de8b018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14768f5b38ea4f6abf5583ce5e4409f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1822.486651] env[62820]: INFO nova.compute.manager [-] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Took 1.47 seconds to deallocate network for instance. [ 1822.502641] env[62820]: DEBUG oslo_vmware.api [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696381, 'name': ReconfigVM_Task, 'duration_secs': 0.25109} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.502641] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Reconfigured VM instance instance-0000003b to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1822.507253] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1136aa0-0fdb-4938-a115-8f150c5c43ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.530032] env[62820]: DEBUG oslo_vmware.api [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1822.530032] env[62820]: value = "task-1696382" [ 1822.530032] env[62820]: _type = "Task" [ 1822.530032] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.541993] env[62820]: DEBUG oslo_vmware.api [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696382, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.544729] env[62820]: DEBUG nova.network.neutron [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Successfully created port: b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1822.549038] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28949f9a-cf20-4327-a84d-765dbebe18ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.556950] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f7f9ba-a01e-4c4d-86ad-20b2b8c6709e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.588679] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd541ffe-8ff5-4184-9ac5-64d2026d8aa4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.596586] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fd9408-f27b-4440-8033-0b5558464309 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.610227] env[62820]: DEBUG nova.compute.provider_tree [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.649759] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20eb096b-e44b-435a-b0a8-891f7cc07b83 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.672330] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1822.716456] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1822.996321] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.039555] env[62820]: DEBUG oslo_vmware.api [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696382, 'name': ReconfigVM_Task, 'duration_secs': 0.165783} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.039884] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353648', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'name': 'volume-1302a61c-1765-4676-9304-76b004523986', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8486f52-998d-4308-813a-9c651e2eb093', 'attached_at': '', 'detached_at': '', 'volume_id': '1302a61c-1765-4676-9304-76b004523986', 'serial': '1302a61c-1765-4676-9304-76b004523986'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1823.113157] env[62820]: DEBUG nova.scheduler.client.report [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1823.177588] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1823.177996] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-188f2b73-9437-4541-8a43-6357f25baf48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.182793] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.185909] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 
tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1823.185909] env[62820]: value = "task-1696383" [ 1823.185909] env[62820]: _type = "Task" [ 1823.185909] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.195725] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.583359] env[62820]: DEBUG nova.objects.instance [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'flavor' on Instance uuid a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1823.618694] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.621057] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.665s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.621308] env[62820]: DEBUG nova.objects.instance [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid efe5ffe8-cd29-467d-85ad-d9e7d4eb9203 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1823.640134] env[62820]: INFO nova.scheduler.client.report [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted allocations for instance 4ea2be66-06b4-4519-82b0-c2b1df329a5a [ 1823.680283] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.680641] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1823.699114] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696383, 'name': PowerOffVM_Task, 'duration_secs': 0.296421} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.699401] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1823.699624] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1823.726770] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1823.752228] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1823.752522] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1823.752686] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1823.752878] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1823.753033] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1823.753187] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1823.753394] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1823.753553] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1823.753747] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1823.753927] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1823.754115] env[62820]: DEBUG nova.virt.hardware [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1823.755223] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186b00ab-f0b5-4bc1-ab8e-1f213b46b73e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.764082] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05974b01-f8b6-4f7c-a3e8-d9c33281308a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.979763] env[62820]: DEBUG nova.compute.manager [req-3eb58f83-2ee2-4db3-abb2-1805e6f5a6c7 req-82068243-973f-4150-97d2-d6d7f432ae08 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Received event network-vif-plugged-b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1823.979987] env[62820]: DEBUG oslo_concurrency.lockutils [req-3eb58f83-2ee2-4db3-abb2-1805e6f5a6c7 req-82068243-973f-4150-97d2-d6d7f432ae08 service nova] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.980219] env[62820]: DEBUG 
oslo_concurrency.lockutils [req-3eb58f83-2ee2-4db3-abb2-1805e6f5a6c7 req-82068243-973f-4150-97d2-d6d7f432ae08 service nova] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.980395] env[62820]: DEBUG oslo_concurrency.lockutils [req-3eb58f83-2ee2-4db3-abb2-1805e6f5a6c7 req-82068243-973f-4150-97d2-d6d7f432ae08 service nova] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.980567] env[62820]: DEBUG nova.compute.manager [req-3eb58f83-2ee2-4db3-abb2-1805e6f5a6c7 req-82068243-973f-4150-97d2-d6d7f432ae08 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] No waiting events found dispatching network-vif-plugged-b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1823.980733] env[62820]: WARNING nova.compute.manager [req-3eb58f83-2ee2-4db3-abb2-1805e6f5a6c7 req-82068243-973f-4150-97d2-d6d7f432ae08 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Received unexpected event network-vif-plugged-b4b5b723-be36-401c-8214-964a362697b6 for instance with vm_state building and task_state spawning. [ 1824.144465] env[62820]: DEBUG nova.network.neutron [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Successfully updated port: b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1824.154032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8c1926d6-a83e-4fa3-9b41-c053e39b24e7 tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "4ea2be66-06b4-4519-82b0-c2b1df329a5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.773s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.205206] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1824.205458] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1824.205621] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1824.205829] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1824.205945] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1824.206107] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1824.206316] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1824.206477] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1824.206640] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1824.206801] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1824.207213] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1824.213072] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.213209] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.213352] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1824.216884] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ddb0735-c91f-45f3-af92-1f8bcc555db6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.235815] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1824.235815] env[62820]: value = "task-1696384" [ 1824.235815] env[62820]: _type = "Task" [ 1824.235815] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.248037] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696384, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.380959] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3451057-c8f4-4f28-90ff-7eae6164a8c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.392253] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a2fa18-e54d-435b-8166-18d1ba50c8bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.430844] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3860e982-5651-4ae5-b65c-91fa048f445f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.439026] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aba402-82d8-46b2-9877-bc47f145efeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.454512] env[62820]: DEBUG nova.compute.provider_tree [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1824.482615] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.482615] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 
tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.482777] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.482815] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.482962] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.484832] env[62820]: INFO nova.compute.manager [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Terminating instance [ 1824.591832] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4c2f70b2-f22b-4bdf-86b2-6dec756c4ab8 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.239s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.652866] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.653098] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.653202] env[62820]: DEBUG nova.network.neutron [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Building network info cache for instance {{(pid=62820) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1824.746333] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696384, 'name': ReconfigVM_Task, 'duration_secs': 0.178249} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.746637] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1824.958053] env[62820]: DEBUG nova.scheduler.client.report [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1824.988022] env[62820]: DEBUG nova.compute.manager [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1824.988239] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1824.989405] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd64ff5-3577-423d-9c4b-c21c492e0f45 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.997776] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1824.998014] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb08c0bb-2337-4924-a891-7790a9c2f563 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.004447] env[62820]: DEBUG oslo_vmware.api [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1825.004447] env[62820]: value = "task-1696385" [ 1825.004447] env[62820]: _type = "Task" [ 1825.004447] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.012565] env[62820]: DEBUG oslo_vmware.api [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696385, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.191188] env[62820]: DEBUG nova.network.neutron [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1825.254451] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1825.254674] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1825.254833] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1825.255027] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1825.255179] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1825.255327] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1825.255523] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1825.255682] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1825.255845] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1825.256016] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1825.256201] env[62820]: DEBUG nova.virt.hardware [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1825.261972] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1825.264138] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eff09841-70b9-4e0c-92d2-33ec4db0c5b8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.284922] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1825.284922] env[62820]: value = "task-1696386" [ 1825.284922] env[62820]: _type = "Task" [ 1825.284922] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.294748] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696386, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.404154] env[62820]: DEBUG nova.network.neutron [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating instance_info_cache with network_info: [{"id": "b4b5b723-be36-401c-8214-964a362697b6", "address": "fa:16:3e:53:8d:e2", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b5b723-be", "ovs_interfaceid": "b4b5b723-be36-401c-8214-964a362697b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.463214] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.465380] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.690s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.465610] env[62820]: DEBUG nova.objects.instance [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lazy-loading 'resources' on Instance uuid 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1825.485043] env[62820]: INFO nova.scheduler.client.report [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance efe5ffe8-cd29-467d-85ad-d9e7d4eb9203 [ 1825.504163] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updating instance_info_cache with network_info: [{"id": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "address": "fa:16:3e:8b:0f:9d", "network": {"id": "4563f837-c9c1-4ea4-b1a3-4e92d0613391", "bridge": "br-int", "label": 
"tempest-ServerRescueNegativeTestJSON-427321660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e5642bbb5de4060be9d4d0ae0f8d6a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c265b9-9a", "ovs_interfaceid": "a4c265b9-9afd-44f1-b48d-b95d490dc950", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.514887] env[62820]: DEBUG oslo_vmware.api [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696385, 'name': PowerOffVM_Task, 'duration_secs': 0.241316} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.515674] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1825.515849] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1825.516120] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cef77f57-497f-4bbd-869b-6193bb6cc1ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.650240] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1825.650599] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1825.650849] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleting the datastore file [datastore1] 
9c0d9676-9db9-4be2-a8e6-84bd816234aa {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1825.651148] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0439f7d0-c95b-4abf-8a9d-ad46ef7f9fb6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.654787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.655016] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.655234] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "a8486f52-998d-4308-813a-9c651e2eb093-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.655424] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.655593] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.658325] env[62820]: DEBUG oslo_vmware.api [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for the task: (returnval){ [ 1825.658325] env[62820]: value = "task-1696388" [ 1825.658325] env[62820]: _type = "Task" [ 1825.658325] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.658758] env[62820]: INFO nova.compute.manager [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Terminating instance [ 1825.670068] env[62820]: DEBUG oslo_vmware.api [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.794798] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696386, 'name': ReconfigVM_Task, 'duration_secs': 0.170421} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.795082] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1825.795840] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3f1479-4da4-4ec9-a8ab-c19870bd1d21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.818637] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8/b89d32f8-0675-4b0c-977e-b7900e62bdd8.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1825.818956] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f411553d-fd1a-49ba-ba13-c5a163c00dc0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.837430] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1825.837430] env[62820]: value = "task-1696389" [ 1825.837430] env[62820]: _type = "Task" [ 1825.837430] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.846507] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696389, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.909539] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.909706] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Instance network_info: |[{"id": "b4b5b723-be36-401c-8214-964a362697b6", "address": "fa:16:3e:53:8d:e2", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b5b723-be", "ovs_interfaceid": "b4b5b723-be36-401c-8214-964a362697b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1825.910276] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:8d:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4b5b723-be36-401c-8214-964a362697b6', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.918806] env[62820]: DEBUG oslo.service.loopingcall [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.919119] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1825.919370] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84176def-12ad-4d12-a1aa-e7fb6c6a1424 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.940348] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.940348] env[62820]: value = "task-1696390" [ 1825.940348] env[62820]: _type = "Task" [ 1825.940348] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.948564] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696390, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.995910] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4f172797-0161-461e-83c5-0288700becbf tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "efe5ffe8-cd29-467d-85ad-d9e7d4eb9203" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.439s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.009694] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-0dd0e112-7a7c-4b37-8938-bb98aab2d485" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.010410] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1826.010410] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.010547] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.010693] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1826.010895] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.023388] env[62820]: DEBUG nova.compute.manager [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Received event network-changed-b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1826.023590] env[62820]: DEBUG nova.compute.manager [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Refreshing instance network info cache due to event network-changed-b4b5b723-be36-401c-8214-964a362697b6. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1826.023839] env[62820]: DEBUG oslo_concurrency.lockutils [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] Acquiring lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.024021] env[62820]: DEBUG oslo_concurrency.lockutils [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] Acquired lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.024202] env[62820]: DEBUG nova.network.neutron [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Refreshing network info cache for port b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1826.166448] env[62820]: DEBUG nova.compute.manager [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1826.166621] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1826.173132] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd90bd5-f98e-4a0d-baec-5d87e523b751 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.176080] env[62820]: DEBUG oslo_vmware.api [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Task: {'id': task-1696388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162106} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.176557] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1826.176752] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1826.176952] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1826.177150] env[62820]: INFO nova.compute.manager [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1826.177420] env[62820]: DEBUG oslo.service.loopingcall [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.177970] env[62820]: DEBUG nova.compute.manager [-] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1826.178121] env[62820]: DEBUG nova.network.neutron [-] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1826.183937] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1826.184243] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d16de91f-c43b-49c0-8235-4099e334e20c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.193635] env[62820]: DEBUG oslo_vmware.api [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1826.193635] env[62820]: value = "task-1696391" [ 1826.193635] env[62820]: _type = "Task" [ 1826.193635] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.203451] env[62820]: DEBUG oslo_vmware.api [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696391, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.247320] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3082d6-fee2-46a3-88d8-00174a2b8d12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.256651] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a81636-0deb-4371-b7a3-918072cd3e78 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.294614] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ebb3ae-dbef-4865-a59b-737495ebcd62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.303770] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1132f8-5aa9-465b-9050-1f0d9a8f33ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.321910] env[62820]: DEBUG nova.compute.provider_tree [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.348293] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696389, 'name': ReconfigVM_Task, 'duration_secs': 0.356574} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.348530] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Reconfigured VM instance instance-00000033 to attach disk [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8/b89d32f8-0675-4b0c-977e-b7900e62bdd8.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1826.348753] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1826.452821] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696390, 'name': CreateVM_Task, 'duration_secs': 0.432618} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.452937] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1826.453893] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.454094] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.454428] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.454699] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bbb27b9-9284-4bdc-b91e-c1653accad12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.460475] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1826.460475] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521c8a7b-9da8-7fa0-8d44-66c97d8ab62f" [ 1826.460475] env[62820]: _type = "Task" [ 1826.460475] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.469321] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521c8a7b-9da8-7fa0-8d44-66c97d8ab62f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.514055] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.574147] env[62820]: DEBUG nova.compute.manager [req-59a08dea-c5d2-498f-8943-d12d7657346e req-42adc66f-c6c1-4bc2-8643-e3d92c786e29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Received event network-vif-deleted-37f8bb7e-538f-426a-a4e3-1ae811cad8d3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1826.574147] env[62820]: INFO nova.compute.manager [req-59a08dea-c5d2-498f-8943-d12d7657346e req-42adc66f-c6c1-4bc2-8643-e3d92c786e29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Neutron deleted interface 37f8bb7e-538f-426a-a4e3-1ae811cad8d3; detaching it from the instance and deleting it from the info cache [ 1826.574147] env[62820]: DEBUG nova.network.neutron [req-59a08dea-c5d2-498f-8943-d12d7657346e req-42adc66f-c6c1-4bc2-8643-e3d92c786e29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.707218] env[62820]: DEBUG oslo_vmware.api [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696391, 'name': PowerOffVM_Task, 'duration_secs': 0.208958} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.707645] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1826.707645] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1826.707869] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b208c72-b8ba-478a-9a40-a488374d7437 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.730502] env[62820]: DEBUG nova.network.neutron [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updated VIF entry in instance network info cache for port b4b5b723-be36-401c-8214-964a362697b6. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.730861] env[62820]: DEBUG nova.network.neutron [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating instance_info_cache with network_info: [{"id": "b4b5b723-be36-401c-8214-964a362697b6", "address": "fa:16:3e:53:8d:e2", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b5b723-be", "ovs_interfaceid": "b4b5b723-be36-401c-8214-964a362697b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.825354] env[62820]: DEBUG nova.scheduler.client.report [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1826.857504] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d128e7e-5d05-49f6-9c3c-38db2808b8b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.879873] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26c9f5c-8ea7-4dc6-9867-53fa09c7445b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.901223] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1826.963791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock 
"3bff732c-9d4f-4dfa-8058-42c4dbde2efe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.964033] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.975347] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521c8a7b-9da8-7fa0-8d44-66c97d8ab62f, 'name': SearchDatastore_Task, 'duration_secs': 0.019802} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.976063] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.976301] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.976522] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.976667] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.976842] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.977310] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa06aa13-a2a2-4a99-af99-88f1510191aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.011024] env[62820]: DEBUG 
nova.network.neutron [-] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.080487] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19e7da3b-2d59-4583-861d-e254645af79f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.091628] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cffdd2d-6da4-4214-862c-ced95a3752df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.127451] env[62820]: DEBUG nova.compute.manager [req-59a08dea-c5d2-498f-8943-d12d7657346e req-42adc66f-c6c1-4bc2-8643-e3d92c786e29 service nova] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Detach interface failed, port_id=37f8bb7e-538f-426a-a4e3-1ae811cad8d3, reason: Instance 9c0d9676-9db9-4be2-a8e6-84bd816234aa could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1827.233577] env[62820]: DEBUG oslo_concurrency.lockutils [req-9e7910cf-5964-4c5c-a310-4a52ecb090fc req-9b4aae89-353c-4de7-a870-6deab1b1c75d service nova] Releasing lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.330588] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.335172] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.337s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.335172] env[62820]: DEBUG nova.objects.instance [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lazy-loading 'resources' on Instance uuid 986d44bb-3d5c-4d3e-a569-45cb1da5c88e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1827.354793] env[62820]: INFO nova.scheduler.client.report [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleted allocations for instance 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620 [ 1827.448193] env[62820]: DEBUG nova.network.neutron [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Port b234cdf0-fffd-452d-a277-6df15c22fa06 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1827.468917] env[62820]: DEBUG nova.compute.manager [None 
req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1827.512411] env[62820]: INFO nova.compute.manager [-] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Took 1.33 seconds to deallocate network for instance. [ 1827.861490] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a13913bd-4f1d-4afc-9f17-01296607c872 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "8d0e3ef5-55e9-4b4e-9252-4e3e921f4620" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.604s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.987731] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.018634] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.147388] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c543a2dd-69bd-46de-9117-552d94c386af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.155898] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab9bbda-4112-4dd2-9c40-de95d83e9e58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.187779] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780568e0-f235-4f60-badd-df0800be3e28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.195561] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9973f7de-e562-4954-99e7-e34e4c201e4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.209215] env[62820]: DEBUG nova.compute.provider_tree [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.471024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.471303] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.471488] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.715282] env[62820]: DEBUG nova.scheduler.client.report [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1828.726850] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.727156] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.727368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.727556] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.727721] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.730979] env[62820]: INFO nova.compute.manager [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Terminating instance [ 1828.810262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.810262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.810262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.810262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.810262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.811020] env[62820]: INFO nova.compute.manager [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] 
[instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Terminating instance [ 1828.843443] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1828.843692] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1828.844679] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdefd88e-6487-48fe-b63b-42ffbbae2a4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.850613] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1828.850613] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528895bd-d747-cc76-16c0-28e65a1c63ad" [ 1828.850613] env[62820]: _type = "Task" [ 1828.850613] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.859203] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528895bd-d747-cc76-16c0-28e65a1c63ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.221109] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.223742] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.710s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.223931] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.224104] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1829.224413] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.237s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.226494] env[62820]: INFO nova.compute.claims [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1829.229700] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d23a91-daba-4806-86bc-5d19cc5bbb2a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.235647] env[62820]: DEBUG nova.compute.manager [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1829.235875] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1829.238921] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a11eae-f2a0-471f-835e-d7d2d49bfeb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.243147] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7ca2b0-e38e-48de-a621-8251bbf387a8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.250978] env[62820]: INFO nova.scheduler.client.report [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Deleted allocations for instance 986d44bb-3d5c-4d3e-a569-45cb1da5c88e [ 1829.262229] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1829.265130] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab69ae1c-0875-49d2-b63e-80b9eef76f97 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.268109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e00c38a-0fcb-4d98-a15f-bfdea9f7e7ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.279627] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9583dedc-7fa1-4df7-86a0-3587563383e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.283111] env[62820]: DEBUG oslo_vmware.api [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1829.283111] env[62820]: value = "task-1696393" [ 1829.283111] env[62820]: _type = "Task" [ 1829.283111] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.314303] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179467MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1829.315040] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.315722] env[62820]: DEBUG nova.compute.manager [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1829.315929] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1829.317059] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144da596-2664-4ca6-af13-789770a331e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.323317] env[62820]: DEBUG oslo_vmware.api [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696393, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.326188] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "392d8bca-2d8d-42c3-ba14-fc1387c75405" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.326449] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.326666] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "392d8bca-2d8d-42c3-ba14-fc1387c75405-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.326893] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.327087] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.331442] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1829.331944] env[62820]: INFO nova.compute.manager [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Terminating instance [ 1829.333338] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58273cf5-1097-4e9c-8061-4f0c1e948145 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.341714] env[62820]: DEBUG oslo_vmware.api [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] 
Waiting for the task: (returnval){ [ 1829.341714] env[62820]: value = "task-1696394" [ 1829.341714] env[62820]: _type = "Task" [ 1829.341714] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.355697] env[62820]: DEBUG oslo_vmware.api [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.367390] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528895bd-d747-cc76-16c0-28e65a1c63ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.510875] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.511149] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.511272] env[62820]: DEBUG nova.network.neutron [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1829.773825] env[62820]: DEBUG oslo_concurrency.lockutils [None req-be778200-6dae-4b34-be4e-a858ce3e1533 tempest-ServerDiskConfigTestJSON-184276959 tempest-ServerDiskConfigTestJSON-184276959-project-member] Lock "986d44bb-3d5c-4d3e-a569-45cb1da5c88e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.552s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.794313] env[62820]: DEBUG oslo_vmware.api [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696393, 'name': PowerOffVM_Task, 'duration_secs': 0.178917} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.794594] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1829.794760] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1829.795009] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c579790a-8fc9-4ec3-9599-d0ad66a17f8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.837854] env[62820]: DEBUG nova.compute.manager [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1829.838109] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1829.839052] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659e5ff2-2085-41aa-8e8a-276a116fa0df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.850397] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1829.854081] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f55e603-f350-48bd-9f7b-e7ab9bb3762f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.854998] env[62820]: DEBUG oslo_vmware.api [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696394, 'name': PowerOffVM_Task, 'duration_secs': 0.200179} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.855460] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1829.855511] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1829.858737] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df1c607e-beae-426d-8892-5d52e6bf040c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.862189] env[62820]: DEBUG oslo_vmware.api [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1829.862189] env[62820]: value = "task-1696396" [ 1829.862189] env[62820]: _type = "Task" [ 1829.862189] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.870357] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528895bd-d747-cc76-16c0-28e65a1c63ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.876682] env[62820]: DEBUG oslo_vmware.api [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696396, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.366540] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528895bd-d747-cc76-16c0-28e65a1c63ad, 'name': SearchDatastore_Task, 'duration_secs': 1.373164} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.373929] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4b7227f-f76c-4f96-9a3e-dcfaeb65002f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.381414] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1830.381414] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c1ba4b-00eb-e9a8-7c65-14e22d827670" [ 1830.381414] env[62820]: _type = "Task" [ 1830.381414] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.386214] env[62820]: DEBUG oslo_vmware.api [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696396, 'name': PowerOffVM_Task, 'duration_secs': 0.195604} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.390582] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1830.390956] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1830.391363] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b996d3f-d592-4f7c-ab50-f4b47d7a6792 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.405250] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c1ba4b-00eb-e9a8-7c65-14e22d827670, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.512579] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfa1e50-6f38-44a5-a947-f5a36b9919c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.522382] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504746c2-f121-4764-8300-370a315d35a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.556171] env[62820]: DEBUG nova.network.neutron [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.558517] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05324661-5905-4da4-a5d0-80f2a939b697 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.568534] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de63aec3-b497-4d1d-87c5-cfea7c298515 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.587937] env[62820]: DEBUG nova.compute.provider_tree [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.694314] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Unregistered the VM {{(pid=62820) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1830.694562] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1830.694902] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleting the datastore file [datastore1] 488900b2-d0c9-4437-9f0c-dfb2ea38cb71 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1830.696704] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b57dc4e-4701-485b-a901-e31f88c3c561 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.698732] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1830.698959] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1830.699191] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleting the datastore file [datastore1] b44f0b18-cc2c-4208-ab54-d4cac8593b4d {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1830.699467] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1830.699643] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1830.699797] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleting the datastore file [datastore1] a8486f52-998d-4308-813a-9c651e2eb093 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1830.700043] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfa77159-9e76-4b13-a123-e27fc37ade5b 
{{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.702289] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5d1a34e-dbc2-4159-aaff-dc179696fc81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.710355] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1830.710611] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1830.710764] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleting the datastore file [datastore1] 392d8bca-2d8d-42c3-ba14-fc1387c75405 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1830.711551] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d69e4d7-ff96-482a-86ed-ae221d9b2e36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.715809] env[62820]: DEBUG oslo_vmware.api [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1830.715809] env[62820]: value = "task-1696399" [ 1830.715809] env[62820]: _type = "Task" [ 1830.715809] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.716092] env[62820]: DEBUG oslo_vmware.api [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1830.716092] env[62820]: value = "task-1696400" [ 1830.716092] env[62820]: _type = "Task" [ 1830.716092] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.716337] env[62820]: DEBUG oslo_vmware.api [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for the task: (returnval){ [ 1830.716337] env[62820]: value = "task-1696401" [ 1830.716337] env[62820]: _type = "Task" [ 1830.716337] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.729721] env[62820]: DEBUG oslo_vmware.api [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1830.729721] env[62820]: value = "task-1696402" [ 1830.729721] env[62820]: _type = "Task" [ 1830.729721] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.741296] env[62820]: DEBUG oslo_vmware.api [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.741594] env[62820]: DEBUG oslo_vmware.api [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696399, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.744978] env[62820]: DEBUG oslo_vmware.api [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.750464] env[62820]: DEBUG oslo_vmware.api [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.900952] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c1ba4b-00eb-e9a8-7c65-14e22d827670, 'name': SearchDatastore_Task, 'duration_secs': 0.284407} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.901434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.901830] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1830.902227] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ee06670-63f8-42d5-9606-f44df55ae0f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.910223] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1830.910223] env[62820]: value = "task-1696403" [ 1830.910223] env[62820]: _type = "Task" [ 1830.910223] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.919122] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.063195] env[62820]: DEBUG oslo_concurrency.lockutils [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.091338] env[62820]: DEBUG nova.scheduler.client.report [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1831.237181] env[62820]: DEBUG oslo_vmware.api [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175697} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.237492] env[62820]: DEBUG oslo_vmware.api [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171217} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.245114] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.245482] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1831.245768] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1831.246041] env[62820]: INFO nova.compute.manager [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Took 5.08 seconds to destroy the instance on the hypervisor. 
[ 1831.246397] env[62820]: DEBUG oslo.service.loopingcall [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.246711] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.246984] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1831.247291] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1831.247580] env[62820]: INFO nova.compute.manager [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Took 1.93 seconds to destroy the instance on the hypervisor. [ 1831.247912] env[62820]: DEBUG oslo.service.loopingcall [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.248249] env[62820]: DEBUG oslo_vmware.api [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Task: {'id': task-1696401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172151} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.248558] env[62820]: DEBUG nova.compute.manager [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1831.248723] env[62820]: DEBUG nova.network.neutron [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1831.250630] env[62820]: DEBUG nova.compute.manager [-] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1831.250728] env[62820]: DEBUG nova.network.neutron [-] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1831.252418] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.252647] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1831.252865] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1831.253087] env[62820]: INFO nova.compute.manager [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Took 2.02 seconds to destroy the instance on the hypervisor. [ 1831.253354] env[62820]: DEBUG oslo.service.loopingcall [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.254016] env[62820]: DEBUG nova.compute.manager [-] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1831.254149] env[62820]: DEBUG nova.network.neutron [-] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1831.260696] env[62820]: DEBUG oslo_vmware.api [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175808} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.261563] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.261826] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1831.262071] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1831.262304] env[62820]: INFO nova.compute.manager [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Took 1.42 seconds to destroy the instance on the hypervisor. [ 1831.262566] env[62820]: DEBUG oslo.service.loopingcall [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.262807] env[62820]: DEBUG nova.compute.manager [-] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1831.262924] env[62820]: DEBUG nova.network.neutron [-] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1831.421127] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448787} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.421612] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1831.422660] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1831.422660] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f19ac05-9a60-4143-a91e-cc7ccff1cf8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.430755] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1831.430755] env[62820]: value = "task-1696404" [ 1831.430755] env[62820]: _type = "Task" [ 1831.430755] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.442983] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.591174] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35d0a83-a09b-47be-8e1f-48fe1250668c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.611174] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.611704] env[62820]: DEBUG nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1831.620263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.597s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.620263] env[62820]: DEBUG nova.objects.instance [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lazy-loading 'resources' on Instance uuid 9c0d9676-9db9-4be2-a8e6-84bd816234aa {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.620263] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08825a86-79e4-4b33-bba1-ce8799d62c08 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.626863] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1831.632239] env[62820]: DEBUG nova.compute.manager [req-a3abaff8-2829-43f6-95ec-5d8169321729 req-9bd9ff0c-040c-40bc-906d-5c2735aa2cc2 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Received event network-vif-deleted-3b0e0049-3600-401b-b074-0a891b2829e0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1831.633066] env[62820]: INFO nova.compute.manager [req-a3abaff8-2829-43f6-95ec-5d8169321729 req-9bd9ff0c-040c-40bc-906d-5c2735aa2cc2 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Neutron deleted interface 3b0e0049-3600-401b-b074-0a891b2829e0; detaching it from the instance and deleting it from the info cache [ 1831.633290] env[62820]: DEBUG nova.network.neutron [req-a3abaff8-2829-43f6-95ec-5d8169321729 req-9bd9ff0c-040c-40bc-906d-5c2735aa2cc2 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.942694] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068059} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.943232] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1831.944235] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee28b38-cf75-49d3-8303-6ebde98c7b23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.980620] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1831.980957] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3fa9ca9-d3ec-49d6-814b-97d3a0b57925 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.014241] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1832.014241] env[62820]: value = "task-1696405" [ 1832.014241] env[62820]: _type = "Task" [ 1832.014241] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.023999] env[62820]: DEBUG nova.compute.manager [req-4204e259-a8bb-4761-953d-9acb7d377777 req-4be89a76-dfed-4b84-9761-2389cab935dd service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Received event network-vif-deleted-609eb18a-aed2-4b6d-bb13-b94371396c84 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1832.024272] env[62820]: INFO nova.compute.manager [req-4204e259-a8bb-4761-953d-9acb7d377777 req-4be89a76-dfed-4b84-9761-2389cab935dd service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Neutron deleted interface 609eb18a-aed2-4b6d-bb13-b94371396c84; detaching it from the instance and deleting it from the info cache [ 1832.024464] env[62820]: DEBUG nova.network.neutron [req-4204e259-a8bb-4761-953d-9acb7d377777 req-4be89a76-dfed-4b84-9761-2389cab935dd service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.030048] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696405, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.122264] env[62820]: DEBUG nova.compute.utils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1832.123632] env[62820]: DEBUG nova.network.neutron [-] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.127427] env[62820]: DEBUG nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1832.127595] env[62820]: DEBUG nova.network.neutron [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1832.136585] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1832.137515] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3725367-22b7-4426-b152-6242d02553a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.141074] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc9522f5-51b5-4c5b-8a0c-5d197d2cc1f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.153534] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374b53d9-43b5-419f-bfa2-fe2d2ae05c2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.165616] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1832.165616] env[62820]: value = "task-1696406" [ 1832.165616] env[62820]: _type = "Task" [ 1832.165616] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.178571] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696406, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.190856] env[62820]: DEBUG nova.compute.manager [req-a3abaff8-2829-43f6-95ec-5d8169321729 req-9bd9ff0c-040c-40bc-906d-5c2735aa2cc2 service nova] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Detach interface failed, port_id=3b0e0049-3600-401b-b074-0a891b2829e0, reason: Instance b44f0b18-cc2c-4208-ab54-d4cac8593b4d could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1832.226662] env[62820]: DEBUG nova.policy [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1832.394880] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bb97b0-3c07-4d55-991c-cd3bfcabb44f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.406578] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffca1c4a-37cb-4d41-86f1-d5add8a56429 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.446644] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc810b4a-a85a-478d-becb-76292abeb437 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.456654] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9207c23-b579-4eed-93f0-fa1db338a8fe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.474457] env[62820]: DEBUG nova.compute.provider_tree [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1832.509225] env[62820]: DEBUG nova.network.neutron [-] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.527624] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696405, 'name': ReconfigVM_Task, 'duration_secs': 0.275568} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.527624] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1832.528405] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1d41575-8044-4f7f-8289-00ef45db4c10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.531063] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57a0aa80-0405-4f64-a425-f7dfa6d2aacf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.536880] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1832.536880] env[62820]: value = "task-1696407" [ 1832.536880] env[62820]: _type = "Task" [ 1832.536880] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.548438] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cef4d5-94f1-4936-b1a1-bc5c552f69a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.571555] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696407, 'name': Rename_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.586502] env[62820]: DEBUG nova.compute.manager [req-4204e259-a8bb-4761-953d-9acb7d377777 req-4be89a76-dfed-4b84-9761-2389cab935dd service nova] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Detach interface failed, port_id=609eb18a-aed2-4b6d-bb13-b94371396c84, reason: Instance 488900b2-d0c9-4437-9f0c-dfb2ea38cb71 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1832.626448] env[62820]: DEBUG nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1832.632383] env[62820]: INFO nova.compute.manager [-] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Took 1.38 seconds to deallocate network for instance. 
[ 1832.674787] env[62820]: DEBUG nova.network.neutron [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Successfully created port: c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1832.686896] env[62820]: DEBUG oslo_vmware.api [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696406, 'name': PowerOnVM_Task, 'duration_secs': 0.409017} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.687416] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1832.687743] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-af2443a5-4bcf-48f7-8563-7cc323520c21 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance 'b89d32f8-0675-4b0c-977e-b7900e62bdd8' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1832.906430] env[62820]: DEBUG nova.network.neutron [-] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.913865] env[62820]: DEBUG nova.network.neutron [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.000943] env[62820]: ERROR nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] [req-53aecf15-4838-4e7a-81e8-f9939c5f2f17] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-53aecf15-4838-4e7a-81e8-f9939c5f2f17"}]} [ 1833.013193] env[62820]: INFO nova.compute.manager [-] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Took 1.76 seconds to deallocate network for instance. 
[ 1833.020304] env[62820]: DEBUG nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1833.040264] env[62820]: DEBUG nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1833.040491] env[62820]: DEBUG nova.compute.provider_tree [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1833.057488] env[62820]: DEBUG nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1833.067021] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696407, 'name': Rename_Task, 'duration_secs': 0.155413} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.067021] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1833.067021] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb65048f-5b29-45ff-9a8f-553ea602f303 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.072498] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1833.072498] env[62820]: value = "task-1696408" [ 1833.072498] env[62820]: _type = "Task" [ 1833.072498] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.083888] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.085880] env[62820]: DEBUG nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1833.136731] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.329568] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f41984f-0724-4b9f-a340-acc0865a0a6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.337591] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fff360-09ce-4f11-a594-8304ec7fdbcf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.368419] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89f9323-109b-42fc-ba0c-9b2aa8ca6737 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.376239] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e5b411-36d1-42da-9720-687d39836d4c {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.391573] env[62820]: DEBUG nova.compute.provider_tree [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1833.409676] env[62820]: INFO nova.compute.manager [-] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Took 2.15 seconds to deallocate network for instance. [ 1833.416962] env[62820]: INFO nova.compute.manager [-] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Took 2.17 seconds to deallocate network for instance. [ 1833.521449] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.584303] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696408, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.636894] env[62820]: DEBUG nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1833.664340] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1833.664606] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1833.664767] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.664950] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1833.665115] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.665269] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1833.665477] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1833.665637] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1833.665803] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1833.665964] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1833.666157] env[62820]: DEBUG nova.virt.hardware [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1833.667109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aa8a5b-c5b2-4e57-8df7-03e404abfbd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.675596] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738b70f2-9bc0-43de-8d76-cbab6dcf211a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.917268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.922554] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.940294] env[62820]: DEBUG nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1833.940576] env[62820]: DEBUG nova.compute.provider_tree [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 160 to 161 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1833.940798] env[62820]: DEBUG nova.compute.provider_tree [None 
req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1834.075868] env[62820]: DEBUG nova.compute.manager [req-9a402577-9ff5-4b21-bcd4-f52fa54323cd req-636fcccc-74cf-48b2-92a1-9f95d7d18f26 service nova] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Received event network-vif-deleted-f54d08d7-24e7-4c0b-8b56-118bdc4e2e96 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1834.075868] env[62820]: DEBUG nova.compute.manager [req-9a402577-9ff5-4b21-bcd4-f52fa54323cd req-636fcccc-74cf-48b2-92a1-9f95d7d18f26 service nova] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Received event network-vif-deleted-89ab754d-6988-4b28-882b-5f352eda86ec {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1834.097486] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696408, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.407349] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.408041] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.446249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.830s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.448831] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.134s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.464362] env[62820]: DEBUG nova.network.neutron [None 
req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Successfully updated port: c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1834.475408] env[62820]: INFO nova.scheduler.client.report [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Deleted allocations for instance 9c0d9676-9db9-4be2-a8e6-84bd816234aa [ 1834.554438] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.554681] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.554887] env[62820]: DEBUG nova.compute.manager [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Going to confirm migration 7 {{(pid=62820) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5127}} [ 1834.586564] env[62820]: DEBUG oslo_vmware.api [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696408, 'name': PowerOnVM_Task, 'duration_secs': 1.054863} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.586847] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1834.587090] env[62820]: INFO nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Took 10.86 seconds to spawn the instance on the hypervisor. 
[ 1834.587284] env[62820]: DEBUG nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1834.588099] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a946b9-c34c-497d-b482-c6b586c2ab74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.912417] env[62820]: INFO nova.compute.manager [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Detaching volume a9165bc5-6390-450a-8758-45dec9de7f6a [ 1834.947267] env[62820]: INFO nova.virt.block_device [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Attempting to driver detach volume a9165bc5-6390-450a-8758-45dec9de7f6a from mountpoint /dev/sdb [ 1834.947524] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1834.947717] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353657', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'name': 'volume-a9165bc5-6390-450a-8758-45dec9de7f6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3228cd34-2144-425a-aca6-400cb0991e43', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'serial': 'a9165bc5-6390-450a-8758-45dec9de7f6a'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1834.948660] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88924e64-7708-4fc4-8c47-5c62d5c73bf3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.982960] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-3bff732c-9d4f-4dfa-8058-42c4dbde2efe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.983126] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-3bff732c-9d4f-4dfa-8058-42c4dbde2efe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1834.983283] env[62820]: DEBUG nova.network.neutron [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.987558] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984f25e7-4622-43f3-99bb-89b40086acef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.990762] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f8433ee-eb6b-44e8-bd1e-1d820fd5b8ac tempest-AttachInterfacesTestJSON-2066427417 tempest-AttachInterfacesTestJSON-2066427417-project-member] Lock "9c0d9676-9db9-4be2-a8e6-84bd816234aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.508s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.997480] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46023b2-5028-4ec5-a228-f86daa0e238a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.018898] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be30ad9b-f329-4fb0-8cf2-552dd8cebc05 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.037978] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] The volume has not been displaced from its original location: [datastore1] volume-a9165bc5-6390-450a-8758-45dec9de7f6a/volume-a9165bc5-6390-450a-8758-45dec9de7f6a.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1835.043119] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1835.043427] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbac7a6d-43bf-4adf-971e-6cad7b6cdd92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.063151] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1835.063151] env[62820]: value = "task-1696409" [ 1835.063151] env[62820]: _type = "Task" [ 1835.063151] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.071510] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.105908] env[62820]: INFO nova.compute.manager [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Took 19.25 seconds to build instance. [ 1835.196818] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.197013] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.197816] env[62820]: DEBUG nova.network.neutron [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1835.197816] env[62820]: DEBUG nova.objects.instance [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'info_cache' on Instance uuid b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1835.461488] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 as it has an incoming, in-progress migration 70a6c233-a354-40ad-9625-8975d924b672. Migration status is confirming {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1835.464122] env[62820]: INFO nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating resource usage from migration 70a6c233-a354-40ad-9625-8975d924b672 [ 1835.488675] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance eafe98b7-a67d-4bab-bfc0-8367ae069d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.488833] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 0dd0e112-7a7c-4b37-8938-bb98aab2d485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.488961] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.489142] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3228cd34-2144-425a-aca6-400cb0991e43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.489253] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 6da857ea-f213-4b17-9e9f-d74d1ea649c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.489372] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 03b0abc8-dd32-4cf9-8750-d64b8a66695e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.489513] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8486f52-998d-4308-813a-9c651e2eb093 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.489641] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 392d8bca-2d8d-42c3-ba14-fc1387c75405 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.489755] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance a8803178-7fa3-42ea-824c-901063673062 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.489913] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 488900b2-d0c9-4437-9f0c-dfb2ea38cb71 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.490157] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b44f0b18-cc2c-4208-ab54-d4cac8593b4d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.490322] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Migration 70a6c233-a354-40ad-9625-8975d924b672 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1835.490447] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.490562] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.490674] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 3bff732c-9d4f-4dfa-8058-42c4dbde2efe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.490890] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1835.491222] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1835.523822] env[62820]: DEBUG nova.network.neutron [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1835.580247] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696409, 'name': ReconfigVM_Task, 'duration_secs': 0.485447} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.580562] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1835.587931] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f55f24ef-37e1-4562-be2c-cf2c15a2ad00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.608070] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e74f0238-4d31-4811-90c9-8ba72ffd7634 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.764s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.609784] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1835.609784] env[62820]: value = "task-1696410" [ 1835.609784] env[62820]: _type = "Task" [ 1835.609784] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.622450] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696410, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.762010] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943ea559-18c9-468f-8686-935947262a12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.770803] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bc54ac-c155-483f-8bfd-aab9fcff857a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.806628] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb728c76-608d-49dd-997e-0af7ad611d18 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.814986] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0430bcda-0035-4d19-a080-e71251bda010 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.831970] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.850645] env[62820]: DEBUG nova.network.neutron [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Updating instance_info_cache with network_info: [{"id": "c61b3713-021f-484e-a5bf-16202f289715", "address": "fa:16:3e:f3:47:5f", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc61b3713-02", "ovs_interfaceid": "c61b3713-021f-484e-a5bf-16202f289715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.121132] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696410, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.330621] env[62820]: DEBUG nova.compute.manager [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Received event network-vif-plugged-c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1836.330862] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] Acquiring lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.331794] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.331794] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.331794] env[62820]: DEBUG nova.compute.manager [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] No waiting events found dispatching network-vif-plugged-c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1836.332042] env[62820]: WARNING nova.compute.manager [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Received unexpected event network-vif-plugged-c61b3713-021f-484e-a5bf-16202f289715 for instance with vm_state building and task_state spawning. [ 1836.332263] env[62820]: DEBUG nova.compute.manager [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Received event network-changed-c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1836.332456] env[62820]: DEBUG nova.compute.manager [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Refreshing instance network info cache due to event network-changed-c61b3713-021f-484e-a5bf-16202f289715. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1836.332681] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] Acquiring lock "refresh_cache-3bff732c-9d4f-4dfa-8058-42c4dbde2efe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.334837] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1836.353566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-3bff732c-9d4f-4dfa-8058-42c4dbde2efe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.353840] env[62820]: DEBUG nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Instance network_info: |[{"id": "c61b3713-021f-484e-a5bf-16202f289715", "address": "fa:16:3e:f3:47:5f", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc61b3713-02", "ovs_interfaceid": "c61b3713-021f-484e-a5bf-16202f289715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1836.354421] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] Acquired lock "refresh_cache-3bff732c-9d4f-4dfa-8058-42c4dbde2efe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.354600] env[62820]: DEBUG nova.network.neutron [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Refreshing network info cache for port 
c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1836.355741] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:47:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c61b3713-021f-484e-a5bf-16202f289715', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1836.363971] env[62820]: DEBUG oslo.service.loopingcall [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1836.365216] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1836.365334] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a58eb0e-0990-4eb7-a3d4-314cd8282897 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.390168] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1836.390168] env[62820]: value = "task-1696411" [ 1836.390168] env[62820]: _type = "Task" [ 1836.390168] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.402047] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.623588] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696410, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.842859] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1836.843119] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.394s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.843444] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.707s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.843571] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.845666] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.324s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.846022] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.847926] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.931s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.848160] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.849964] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 
tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.928s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.850182] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.851993] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.852942] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances with incomplete migration {{(pid=62820) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11624}} [ 1836.893179] env[62820]: INFO nova.scheduler.client.report [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted allocations for instance a8486f52-998d-4308-813a-9c651e2eb093 [ 1836.899427] env[62820]: INFO nova.scheduler.client.report [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleted allocations for instance b44f0b18-cc2c-4208-ab54-d4cac8593b4d [ 1836.915457] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.936110] env[62820]: INFO nova.scheduler.client.report [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted allocations for instance 392d8bca-2d8d-42c3-ba14-fc1387c75405 [ 1836.938063] env[62820]: INFO nova.scheduler.client.report [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Deleted allocations for instance 488900b2-d0c9-4437-9f0c-dfb2ea38cb71 [ 1837.010535] env[62820]: DEBUG nova.network.neutron [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [{"id": "b234cdf0-fffd-452d-a277-6df15c22fa06", "address": "fa:16:3e:e6:6e:bf", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb234cdf0-ff", "ovs_interfaceid": "b234cdf0-fffd-452d-a277-6df15c22fa06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.122650] env[62820]: DEBUG oslo_vmware.api [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696410, 'name': ReconfigVM_Task, 'duration_secs': 1.228753} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.122650] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353657', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'name': 'volume-a9165bc5-6390-450a-8758-45dec9de7f6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3228cd34-2144-425a-aca6-400cb0991e43', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9165bc5-6390-450a-8758-45dec9de7f6a', 'serial': 'a9165bc5-6390-450a-8758-45dec9de7f6a'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1837.377320] env[62820]: DEBUG nova.network.neutron [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Updated VIF entry in instance network info cache for port c61b3713-021f-484e-a5bf-16202f289715. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1837.377742] env[62820]: DEBUG nova.network.neutron [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Updating instance_info_cache with network_info: [{"id": "c61b3713-021f-484e-a5bf-16202f289715", "address": "fa:16:3e:f3:47:5f", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc61b3713-02", "ovs_interfaceid": "c61b3713-021f-484e-a5bf-16202f289715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.412875] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.416565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4e89b9cb-90d6-4090-a273-3a3ae9a1dc14 tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "b44f0b18-cc2c-4208-ab54-d4cac8593b4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.605s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.416565] env[62820]: DEBUG oslo_concurrency.lockutils [None req-24e95f86-9376-40ff-aebf-db84f987da3e tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "a8486f52-998d-4308-813a-9c651e2eb093" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.759s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.448798] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0aa17149-1729-46a1-b82d-b82a92e3cce5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "392d8bca-2d8d-42c3-ba14-fc1387c75405" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.122s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.452000] env[62820]: DEBUG oslo_concurrency.lockutils [None req-315fa777-0ddc-42a0-9eca-690208a06c1c tempest-ListServersNegativeTestJSON-1350971703 tempest-ListServersNegativeTestJSON-1350971703-project-member] Lock "488900b2-d0c9-4437-9f0c-dfb2ea38cb71" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.723s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.463865] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.464207] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.517674] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-b89d32f8-0675-4b0c-977e-b7900e62bdd8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.517955] env[62820]: DEBUG nova.objects.instance [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'migration_context' on Instance uuid 
b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.673225] env[62820]: DEBUG nova.objects.instance [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'flavor' on Instance uuid 3228cd34-2144-425a-aca6-400cb0991e43 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.883364] env[62820]: DEBUG oslo_concurrency.lockutils [req-7d525cd3-a4a8-4f7c-be18-667af13f5605 req-398dffe6-e65c-406a-b30c-d2c0504b7e99 service nova] Releasing lock "refresh_cache-3bff732c-9d4f-4dfa-8058-42c4dbde2efe" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.908827] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.967354] env[62820]: DEBUG nova.compute.utils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1838.021997] env[62820]: DEBUG nova.objects.base [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1838.022078] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d8fa4a-a468-42bf-b4b8-f13711f35a57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.032035] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.032035] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.053019] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75eae315-bb8e-4e1c-ad2b-e207f8a63880 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.059620] env[62820]: DEBUG oslo_vmware.api [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1838.059620] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528febb2-8e8b-427a-3db1-eeed8eae1ecf" [ 1838.059620] env[62820]: _type = "Task" [ 1838.059620] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.071506] env[62820]: DEBUG oslo_vmware.api [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528febb2-8e8b-427a-3db1-eeed8eae1ecf, 'name': SearchDatastore_Task, 'duration_secs': 0.009427} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.071813] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.072079] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.407097] env[62820]: DEBUG nova.compute.manager [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Received event network-changed-b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1838.407505] env[62820]: DEBUG nova.compute.manager [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Refreshing instance network info cache due to event network-changed-b4b5b723-be36-401c-8214-964a362697b6. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1838.407505] env[62820]: DEBUG oslo_concurrency.lockutils [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] Acquiring lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.407870] env[62820]: DEBUG oslo_concurrency.lockutils [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] Acquired lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.408167] env[62820]: DEBUG nova.network.neutron [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Refreshing network info cache for port b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1838.421610] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.469907] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.523983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.524572] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.538936] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.539135] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.539354] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.539515] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Cleaning up deleted instances {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11586}} [ 1838.679528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8dd913b2-9e2d-422e-a3dd-6faf254575a2 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.271s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.777700] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e84b24-6f40-445a-a840-a00f2bb4eaef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.789348] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f971e19-e784-4f59-be2b-70b31c464c84 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.819612] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b029ee4f-9511-47e0-945f-2bc27e49ff72 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.828123] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9888fe74-28a2-415f-8a84-491a471eff35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.842104] env[62820]: DEBUG nova.compute.provider_tree [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.915228] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.030563] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1839.058419] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] There are 58 instances to clean {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11595}} [ 1839.058614] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 986d44bb-3d5c-4d3e-a569-45cb1da5c88e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1839.068053] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.068053] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.068053] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "3228cd34-2144-425a-aca6-400cb0991e43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.068171] env[62820]: DEBUG oslo_concurrency.lockutils 
[None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.068264] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.071770] env[62820]: INFO nova.compute.manager [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Terminating instance [ 1839.345246] env[62820]: DEBUG nova.scheduler.client.report [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1839.417270] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.546161] env[62820]: DEBUG nova.network.neutron [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updated VIF entry in instance network info cache for port b4b5b723-be36-401c-8214-964a362697b6. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1839.547231] env[62820]: DEBUG nova.network.neutron [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating instance_info_cache with network_info: [{"id": "b4b5b723-be36-401c-8214-964a362697b6", "address": "fa:16:3e:53:8d:e2", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b5b723-be", "ovs_interfaceid": "b4b5b723-be36-401c-8214-964a362697b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.560981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.563329] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b44f0b18-cc2c-4208-ab54-d4cac8593b4d] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1839.575137] env[62820]: DEBUG nova.compute.manager [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1839.575361] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1839.577445] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cd9e09-280b-4691-a86c-c229f1ddafad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.587043] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1839.587376] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-923daaa8-a624-4fbd-a2b6-7f956d5fcd83 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.589643] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.589871] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.590130] env[62820]: INFO nova.compute.manager [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Attaching volume 8d042475-114b-486b-830d-875d25458b64 to /dev/sdb [ 1839.600163] env[62820]: DEBUG oslo_vmware.api [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1839.600163] env[62820]: value = "task-1696412" [ 1839.600163] env[62820]: _type = "Task" [ 1839.600163] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.612773] env[62820]: DEBUG oslo_vmware.api [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696412, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.640326] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7435ae-022c-4fc5-a0d1-56b260e62c4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.648296] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afebcbb-383e-42ce-a3a4-27c491f86681 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.667392] env[62820]: DEBUG nova.virt.block_device [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating existing volume attachment record: c55e205e-343f-4196-9c92-2d84d7d4db62 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1839.707746] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.708084] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.913723] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.052219] env[62820]: DEBUG oslo_concurrency.lockutils [req-b3b98237-19f7-425d-a85e-837c02f3e1c3 req-72855386-d989-43e6-b9b7-d026f0e0bd56 service nova] Releasing lock "refresh_cache-25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.067441] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 488900b2-d0c9-4437-9f0c-dfb2ea38cb71] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1840.118729] env[62820]: DEBUG oslo_vmware.api [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696412, 'name': PowerOffVM_Task, 'duration_secs': 0.313037} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.118729] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1840.118729] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1840.118729] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d422198b-939b-4d0b-975b-5651d4531882 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.211207] env[62820]: INFO nova.compute.manager [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Detaching volume 16c6f1fc-6f67-4e08-aab1-a64450c4d085 [ 1840.260982] env[62820]: INFO nova.virt.block_device [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Attempting to driver detach volume 16c6f1fc-6f67-4e08-aab1-a64450c4d085 from mountpoint /dev/sdb [ 1840.261270] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1840.261467] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353651', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'name': 'volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '03b0abc8-dd32-4cf9-8750-d64b8a66695e', 'attached_at': '', 'detached_at': '', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'serial': '16c6f1fc-6f67-4e08-aab1-a64450c4d085'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1840.262389] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3f2458-10b2-4f71-b68d-1176277146ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.286111] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8b1877-290c-48ec-9d21-df4729c02bf0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.294819] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4292b271-cf93-4bf9-8acd-058a6f1a172b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.322937] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8e0a02-ffa6-44ac-b5af-e50662dd460f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.338700] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] The volume has not been displaced from its original location: [datastore1] volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085/volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1840.344597] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1840.345129] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a749131e-8983-4036-9d1f-3ee992f7b751 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.363243] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.291s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.366446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.806s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.370067] env[62820]: INFO nova.compute.claims [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1840.377485] env[62820]: DEBUG oslo_vmware.api [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1840.377485] env[62820]: value = "task-1696417" [ 1840.377485] env[62820]: _type = "Task" [ 1840.377485] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.386813] env[62820]: DEBUG oslo_vmware.api [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.413322] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.574199] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 8d0e3ef5-55e9-4b4e-9252-4e3e921f4620] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1840.894854] env[62820]: DEBUG oslo_vmware.api [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696417, 'name': ReconfigVM_Task, 'duration_secs': 0.234519} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.895223] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1840.901604] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f88247f3-cf91-433b-8c8b-4fa77c34a5d1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.930116] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.933835] env[62820]: DEBUG oslo_vmware.api [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1840.933835] env[62820]: value = "task-1696418" [ 1840.933835] env[62820]: _type = "Task" [ 1840.933835] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.944747] env[62820]: DEBUG oslo_vmware.api [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696418, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.961638] env[62820]: INFO nova.scheduler.client.report [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted allocation for migration 70a6c233-a354-40ad-9625-8975d924b672 [ 1841.071192] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.071192] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.079545] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: f78bf828-b9ab-480e-bd58-3dd8587780ea] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1841.437049] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.450064] env[62820]: DEBUG oslo_vmware.api [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696418, 'name': ReconfigVM_Task, 'duration_secs': 0.208692} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.450416] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353651', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'name': 'volume-16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '03b0abc8-dd32-4cf9-8750-d64b8a66695e', 'attached_at': '', 'detached_at': '', 'volume_id': '16c6f1fc-6f67-4e08-aab1-a64450c4d085', 'serial': '16c6f1fc-6f67-4e08-aab1-a64450c4d085'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1841.473830] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3efdb33b-be9a-458a-9ed7-7f8ed0e2141c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.919s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.574879] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1841.581475] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 392d8bca-2d8d-42c3-ba14-fc1387c75405] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1841.619884] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e451d83f-4e97-490f-a2c9-b5616ba93f4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.628926] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d92a3a4-8dfa-4bcb-ae00-39510725b268 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.666956] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d91ac9-d3a7-4488-ad1c-aa15842196b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.675075] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677e6c1b-7ed9-41fc-8160-fa4f0db5dab6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.689515] env[62820]: DEBUG nova.compute.provider_tree [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.929247] 
env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.008313] env[62820]: DEBUG nova.objects.instance [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'flavor' on Instance uuid 03b0abc8-dd32-4cf9-8750-d64b8a66695e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1842.087272] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 6768101f-8d1d-46be-b0b9-2fdf6cba08da] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1842.105200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.193115] env[62820]: DEBUG nova.scheduler.client.report [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1842.432280] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.592189] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: efe5ffe8-cd29-467d-85ad-d9e7d4eb9203] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1842.701591] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.701991] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1842.706473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.600s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.707972] env[62820]: INFO nova.compute.claims [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1842.780352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.780634] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.780841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.781033] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.781211] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.784935] env[62820]: INFO nova.compute.manager [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Terminating instance [ 1842.931998] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task} progress is 25%. 
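The repeated "Acquiring lock" / "acquired ... waited" / "released ... held" messages around "compute_resources" above are produced by oslo.concurrency's lockutils wrapper, which serializes the resource tracker's claim and drop operations on this host. A minimal sketch of that pattern, assuming an illustrative function body (only the lock name is taken from the log):

    from oslo_concurrency import lockutils

    # Serialize work against this compute node's resources, mirroring the
    # "compute_resources" lock in the entries above. The "acquired ... waited"
    # and "released ... held" messages are emitted by lockutils' inner wrapper
    # around the decorated function.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Body is illustrative only; the real ResourceTracker performs the
        # resource claim while the lock is held.
        return instance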
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.015719] env[62820]: DEBUG oslo_concurrency.lockutils [None req-56f7f7c5-cd6e-40ca-8ae9-90ff37625e6d tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.307s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.020834] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1843.021706] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1843.021823] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleting the datastore file [datastore1] 3228cd34-2144-425a-aca6-400cb0991e43 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1843.022416] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0a1f383-9230-42f4-ad88-a9bc5a7024d1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.032024] env[62820]: DEBUG oslo_vmware.api [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1843.032024] env[62820]: value = "task-1696420" [ 1843.032024] env[62820]: _type = "Task" [ 1843.032024] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.045578] env[62820]: DEBUG oslo_vmware.api [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696420, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.093987] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: a495b540-806d-4cd8-b340-86fe937867cd] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1843.213467] env[62820]: DEBUG nova.compute.utils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1843.222848] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1843.222848] env[62820]: DEBUG nova.network.neutron [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1843.277444] env[62820]: DEBUG nova.policy [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '183f339671f54844bee09459976816ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f214ed24ef014d32bfaea02a7174b912', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1843.289083] env[62820]: DEBUG nova.compute.manager [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1843.289348] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1843.290308] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eefc0f6-7646-42d1-834d-2c06d8883adb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.301342] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1843.301342] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db6ff151-98f1-403b-9234-08f6ddaddded {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.314033] env[62820]: DEBUG oslo_vmware.api [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1843.314033] env[62820]: value = "task-1696421" [ 1843.314033] env[62820]: _type = "Task" [ 1843.314033] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.322564] env[62820]: DEBUG oslo_vmware.api [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.441422] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696411, 'name': CreateVM_Task, 'duration_secs': 6.722813} completed successfully. 
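The "Waiting for the task: (returnval){ ... }" blocks and the "_poll_task ... progress is N%" entries above come from oslo.vmware's task handling: a vSphere task is started through the session and then polled until it completes or fails. A minimal sketch of that pattern, with placeholder connection details and an assumed VM reference (none of the values below come from this deployment):

    from oslo_vmware import api as vmware_api

    # Placeholder host, credentials and polling settings; illustrative only.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # vm_ref would be a managed object reference obtained elsewhere, e.g. from
    # a PropertyCollector query; it is assumed here.
    vm_ref = ...

    # Start the asynchronous vSphere task, then block until it finishes.
    # wait_for_task() polls the task (producing the "progress is N%" entries)
    # and raises if the task ends in error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    task_info = session.wait_for_task(task)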
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.441422] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1843.441422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.441422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.441422] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1843.441422] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e46a3eb-411b-4c22-80d9-0ebedf99a1bd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.446129] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1843.446129] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265967b-017d-449b-8e24-125be3872738" [ 1843.446129] env[62820]: _type = "Task" [ 1843.446129] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.456314] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5265967b-017d-449b-8e24-125be3872738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.543506] env[62820]: DEBUG oslo_vmware.api [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237259} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.543758] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1843.543942] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1843.544826] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1843.545049] env[62820]: INFO nova.compute.manager [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Took 3.97 seconds to destroy the instance on the hypervisor. [ 1843.545316] env[62820]: DEBUG oslo.service.loopingcall [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.545560] env[62820]: DEBUG nova.compute.manager [-] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1843.545599] env[62820]: DEBUG nova.network.neutron [-] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1843.598050] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4ea2be66-06b4-4519-82b0-c2b1df329a5a] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1843.698356] env[62820]: DEBUG nova.network.neutron [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Successfully created port: d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1843.723678] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1843.829323] env[62820]: DEBUG oslo_vmware.api [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696421, 'name': PowerOffVM_Task, 'duration_secs': 0.195596} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.830193] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1843.830193] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1843.830885] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52b96777-e74f-4b96-bc5d-ff3285d5e480 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.920485] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1843.920485] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1843.920485] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleting the datastore file [datastore1] b89d32f8-0675-4b0c-977e-b7900e62bdd8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1843.920875] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27bc4180-f0f1-4e69-b866-7bd7f2b09d10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.928271] env[62820]: DEBUG oslo_vmware.api [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1843.928271] env[62820]: value = "task-1696423" [ 1843.928271] env[62820]: _type = "Task" [ 1843.928271] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.942145] env[62820]: DEBUG oslo_vmware.api [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.967442] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5265967b-017d-449b-8e24-125be3872738, 'name': SearchDatastore_Task, 'duration_secs': 0.011819} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.971285] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.971285] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1843.971285] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.971285] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.971285] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1843.971285] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18c9dfa7-7be8-4c0b-a189-d7bce9f86488 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.974011] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfa077a-b8be-49fd-b652-6a9d3998e495 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.984969] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f9b46b5b-b1b1-47aa-b414-0651ece30489 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.019257] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85f3621-84cf-4078-93cc-714465b68e76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.022426] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1844.022426] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1844.023291] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645f97e8-5a61-44aa-83f8-e04d07f6e5e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.030298] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1844.030298] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528ec33c-d6ff-d4e6-23f9-0eb474aec820" [ 1844.030298] env[62820]: _type = "Task" [ 1844.030298] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.037735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d565b701-a1b6-4fe0-80a8-db388c6e2c94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.047867] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528ec33c-d6ff-d4e6-23f9-0eb474aec820, 'name': SearchDatastore_Task, 'duration_secs': 0.011526} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.060057] env[62820]: DEBUG nova.compute.provider_tree [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1844.061023] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49034dc5-f470-40d4-95a2-6bbd105e283b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.068829] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1844.068829] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]524a4c75-6d93-7a67-0bab-62bd2624cda8" [ 1844.068829] env[62820]: _type = "Task" [ 1844.068829] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.078791] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524a4c75-6d93-7a67-0bab-62bd2624cda8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.103682] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 35b95400-6399-48ae-b7d5-420c33d653dd] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1844.152230] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.152552] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.235511] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1844.235738] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353674', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'name': 'volume-8d042475-114b-486b-830d-875d25458b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a8803178-7fa3-42ea-824c-901063673062', 'attached_at': '', 'detached_at': '', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'serial': '8d042475-114b-486b-830d-875d25458b64'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1844.236157] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.236412] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.236552] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.236723] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.236896] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.239099] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e33431-1446-4d46-b49a-6e23f4983a0d {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.242114] env[62820]: INFO nova.compute.manager [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Terminating instance [ 1844.264573] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3294fb-460b-4765-9a01-5b4b7d1e45e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.299856] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] volume-8d042475-114b-486b-830d-875d25458b64/volume-8d042475-114b-486b-830d-875d25458b64.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1844.300941] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec745f5f-f9cd-4705-8a25-77652088bb2e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.322887] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1844.322887] env[62820]: value = "task-1696424" [ 1844.322887] env[62820]: _type = "Task" [ 1844.322887] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.334257] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696424, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.441021] env[62820]: DEBUG oslo_vmware.api [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176935} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.441021] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1844.441021] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1844.441021] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1844.441021] env[62820]: INFO nova.compute.manager [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1844.441021] env[62820]: DEBUG oslo.service.loopingcall [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1844.441021] env[62820]: DEBUG nova.compute.manager [-] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1844.441021] env[62820]: DEBUG nova.network.neutron [-] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1844.565682] env[62820]: DEBUG nova.scheduler.client.report [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1844.584641] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]524a4c75-6d93-7a67-0bab-62bd2624cda8, 'name': SearchDatastore_Task, 'duration_secs': 0.010255} completed successfully. 
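The inventory reported for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a above is what placement uses to bound future claims; the effective capacity for each resource class is (total - reserved) * allocation_ratio, the standard placement capacity calculation. A small sketch using the values copied from the log entry:

    # Inventory figures copied from the report for provider
    # 8a0693d4-1456-4a04-ae15-b1eaea0edd7a in the entry above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    # Effective capacity placement schedules against:
    # (total - reserved) * allocation_ratio.
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0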
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.585197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.585742] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3bff732c-9d4f-4dfa-8058-42c4dbde2efe/3bff732c-9d4f-4dfa-8058-42c4dbde2efe.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1844.586252] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f3fd206-145e-4d25-8855-a4ff34e87f0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.596821] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1844.596821] env[62820]: value = "task-1696425" [ 1844.596821] env[62820]: _type = "Task" [ 1844.596821] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.606726] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9c0d9676-9db9-4be2-a8e6-84bd816234aa] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1844.615574] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.656099] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1844.745865] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1844.751148] env[62820]: DEBUG nova.compute.manager [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1844.751148] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1844.751685] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299a5fc6-e291-4254-bc7c-e0f68621dd4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.760155] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.761038] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-264fd38d-f652-4e10-a3ae-37ca3fb859c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.770042] env[62820]: DEBUG oslo_vmware.api [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1844.770042] env[62820]: value = "task-1696426" [ 1844.770042] env[62820]: _type = "Task" [ 1844.770042] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.777533] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1844.777843] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1844.778066] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1844.779157] env[62820]: DEBUG nova.virt.hardware [None 
req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1844.779395] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1844.779622] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1844.779901] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1844.780166] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1844.780416] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1844.780696] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1844.781936] env[62820]: DEBUG nova.virt.hardware [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1844.782754] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcbfc18-c56e-4725-bc78-982510ac6a30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.794964] env[62820]: DEBUG oslo_vmware.api [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.798637] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cf28dd-0665-40a6-a48b-f580f2d33c73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.836083] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.856038] env[62820]: DEBUG nova.compute.manager [req-82048c21-a891-432f-bfe7-f9669dec3726 req-591d2a18-00b4-459d-9bc9-12a3d40f7c07 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Received event network-vif-deleted-1c306539-7756-458b-84e7-61bfbc0c7f35 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1844.856038] env[62820]: INFO nova.compute.manager [req-82048c21-a891-432f-bfe7-f9669dec3726 req-591d2a18-00b4-459d-9bc9-12a3d40f7c07 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Neutron deleted interface 1c306539-7756-458b-84e7-61bfbc0c7f35; detaching it from the instance and deleting it from the info cache [ 1844.856038] env[62820]: DEBUG nova.network.neutron [req-82048c21-a891-432f-bfe7-f9669dec3726 req-591d2a18-00b4-459d-9bc9-12a3d40f7c07 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.078815] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.079151] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1845.107721] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696425, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.116436] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4323e7df-136f-4bbe-8160-fd7b2579727e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1845.181125] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.181403] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.182960] env[62820]: INFO nova.compute.claims [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1845.190540] env[62820]: DEBUG nova.network.neutron [-] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.280205] env[62820]: DEBUG oslo_vmware.api [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696426, 'name': PowerOffVM_Task, 'duration_secs': 0.250971} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.280504] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.280675] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1845.280927] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f5ecc9d-4486-4515-83d6-e41d55914ddb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.336940] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696424, 'name': ReconfigVM_Task, 'duration_secs': 0.676621} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.337259] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to attach disk [datastore1] volume-8d042475-114b-486b-830d-875d25458b64/volume-8d042475-114b-486b-830d-875d25458b64.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1845.342562] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd8e3da8-252c-43a2-9daa-af93e04d56d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.358188] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92136d0b-0418-4c21-a440-48896f1b37f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.362286] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1845.362286] env[62820]: value = "task-1696428" [ 1845.362286] env[62820]: _type = "Task" [ 1845.362286] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.371032] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f402dd-c2ca-45fd-9d98-dbe9f29164b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.391147] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696428, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.391685] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1845.391894] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1845.392373] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleting the datastore file [datastore1] 03b0abc8-dd32-4cf9-8750-d64b8a66695e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1845.393295] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4476332-ee3c-483e-8109-70314fab65c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.402270] env[62820]: DEBUG oslo_vmware.api [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1845.402270] env[62820]: value = "task-1696429" [ 1845.402270] env[62820]: _type = "Task" [ 1845.402270] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.412012] env[62820]: DEBUG oslo_vmware.api [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.420605] env[62820]: DEBUG nova.compute.manager [req-82048c21-a891-432f-bfe7-f9669dec3726 req-591d2a18-00b4-459d-9bc9-12a3d40f7c07 service nova] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Detach interface failed, port_id=1c306539-7756-458b-84e7-61bfbc0c7f35, reason: Instance 3228cd34-2144-425a-aca6-400cb0991e43 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1845.585716] env[62820]: DEBUG nova.compute.utils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1845.587335] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1845.587521] env[62820]: DEBUG nova.network.neutron [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1845.609586] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696425, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515142} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.610060] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 3bff732c-9d4f-4dfa-8058-42c4dbde2efe/3bff732c-9d4f-4dfa-8058-42c4dbde2efe.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1845.610060] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1845.611181] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dadd3fe7-a897-49ec-a36a-074621ee3dc7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.615955] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.616220] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.623654] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 82379c63-8dce-4b61-afb9-9b6a5ff605b5] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1845.626699] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1845.626699] env[62820]: value = "task-1696430" [ 1845.626699] env[62820]: _type = "Task" [ 1845.626699] env[62820]: } to 
complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.636685] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696430, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.652938] env[62820]: DEBUG nova.policy [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '134e01f94e1e49cba6b909dd3e81715d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfe9869537de4334a0c8ce91fd062659', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1845.691457] env[62820]: INFO nova.compute.manager [-] [instance: 3228cd34-2144-425a-aca6-400cb0991e43] Took 2.15 seconds to deallocate network for instance. [ 1845.735724] env[62820]: DEBUG nova.network.neutron [-] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.876533] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696428, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.915148] env[62820]: DEBUG oslo_vmware.api [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155954} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.915415] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1845.915594] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1845.915765] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1845.915934] env[62820]: INFO nova.compute.manager [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1845.916592] env[62820]: DEBUG oslo.service.loopingcall [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.916592] env[62820]: DEBUG nova.compute.manager [-] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1845.916592] env[62820]: DEBUG nova.network.neutron [-] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.091058] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1846.118724] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1846.128208] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 46217ada-3fab-4dbc-a65e-a3b8e856918d] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1846.140865] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06858} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.140865] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1846.141515] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90288261-f995-404f-8b82-e616f2b0faaa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.168415] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 3bff732c-9d4f-4dfa-8058-42c4dbde2efe/3bff732c-9d4f-4dfa-8058-42c4dbde2efe.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1846.169412] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d3546d4-02ac-40f9-bc5b-00fb5661cc79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.202119] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.206802] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1846.206802] env[62820]: value = "task-1696431" [ 1846.206802] env[62820]: _type = "Task" [ 1846.206802] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.217117] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696431, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.226858] env[62820]: DEBUG nova.network.neutron [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Successfully updated port: d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1846.238876] env[62820]: INFO nova.compute.manager [-] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Took 1.80 seconds to deallocate network for instance. [ 1846.327768] env[62820]: DEBUG nova.network.neutron [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Successfully created port: 7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1846.378137] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696428, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.447683] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f718f49-f47b-48c0-a3e1-44895fc8d321 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.456228] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccee8837-4f2d-4513-8701-734ea4a0d235 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.489706] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db3ac1c-9065-480a-bf0a-f76148b2fa03 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.498552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac19183-b75d-4740-b570-674bd790e6ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.515654] env[62820]: DEBUG nova.compute.provider_tree [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1846.635118] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4f3fcdcc-90ac-4cc2-8c1c-30badbbf4aad] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1846.643992] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.717847] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696431, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.729685] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.729787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.729908] env[62820]: DEBUG nova.network.neutron [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1846.745528] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.878738] env[62820]: DEBUG oslo_vmware.api [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696428, 'name': ReconfigVM_Task, 'duration_secs': 1.178631} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.879092] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353674', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'name': 'volume-8d042475-114b-486b-830d-875d25458b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a8803178-7fa3-42ea-824c-901063673062', 'attached_at': '', 'detached_at': '', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'serial': '8d042475-114b-486b-830d-875d25458b64'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1846.888599] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: b89d32f8-0675-4b0c-977e-b7900e62bdd8] Received event network-vif-deleted-b234cdf0-fffd-452d-a277-6df15c22fa06 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1846.888815] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Received event network-vif-plugged-d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1846.888895] env[62820]: DEBUG oslo_concurrency.lockutils [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.889229] env[62820]: DEBUG oslo_concurrency.lockutils [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] Lock "901626d2-1788-4017-b0c7-52537618804c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.889375] env[62820]: DEBUG oslo_concurrency.lockutils [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] Lock "901626d2-1788-4017-b0c7-52537618804c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.889592] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] No waiting events found dispatching network-vif-plugged-d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1846.889811] env[62820]: WARNING nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Received unexpected event network-vif-plugged-d3537ab9-0a82-437a-83c1-ffb18a60490a for 
instance with vm_state building and task_state spawning. [ 1846.890056] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Received event network-changed-d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1846.890248] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Refreshing instance network info cache due to event network-changed-d3537ab9-0a82-437a-83c1-ffb18a60490a. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1846.890421] env[62820]: DEBUG oslo_concurrency.lockutils [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] Acquiring lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.018894] env[62820]: DEBUG nova.scheduler.client.report [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1847.100186] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1847.129488] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1847.129951] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1847.130239] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1847.130551] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1847.130835] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1847.131127] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1847.131449] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1847.131718] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
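[editor's note] The nova/virt/hardware.py DEBUG lines above trace the driver enumerating CPU topologies for the 1-vCPU m1.nano flavor, with 65536 standing in for "no limit" on sockets, cores and threads. As a rough, hedged sketch only (this is not the nova.virt.hardware implementation; the function name and defaults below are illustrative), the enumeration step can be pictured as: every (sockets, cores, threads) triple whose product equals the vCPU count and that respects the caps.

    # Illustrative sketch, not Nova code: enumerate candidate CPU topologies
    # for a vCPU count under socket/core/thread caps, mirroring the
    # "Build topologies ... Got 1 possible topologies" lines above.
    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Return every topology whose sockets*cores*threads equals vcpus."""
        found = []
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append(VirtCPUTopology(s, c, t))
        return found

    # For the 1-vCPU flavor in this log the result is a single entry,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))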
[ 1847.132129] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1847.132545] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1847.133027] env[62820]: DEBUG nova.virt.hardware [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1847.134036] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a33a5c-a60c-4cb4-9923-0f39974725ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.138490] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9acf0d8f-2daa-4c3a-9ac0-a1be12e56369] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1847.145066] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d317cb-0589-4eb6-95b3-bf9addd488ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.219920] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696431, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.261375] env[62820]: DEBUG nova.network.neutron [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1847.282189] env[62820]: DEBUG nova.network.neutron [-] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.401990] env[62820]: DEBUG nova.network.neutron [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [{"id": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "address": "fa:16:3e:96:64:a6", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3537ab9-0a", "ovs_interfaceid": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.524550] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.525225] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1847.528087] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.326s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.528320] env[62820]: DEBUG nova.objects.instance [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'resources' on Instance uuid 3228cd34-2144-425a-aca6-400cb0991e43 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1847.645344] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 76bd4a09-300d-460e-8442-21b4f6567698] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1847.719771] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696431, 'name': ReconfigVM_Task, 'duration_secs': 1.485039} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.720080] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 3bff732c-9d4f-4dfa-8058-42c4dbde2efe/3bff732c-9d4f-4dfa-8058-42c4dbde2efe.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1847.720718] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15513f81-c709-4d06-8001-289ba92d03cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.737522] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1847.737522] env[62820]: value = "task-1696432" [ 1847.737522] env[62820]: _type = "Task" [ 1847.737522] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.746547] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696432, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.786392] env[62820]: INFO nova.compute.manager [-] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Took 1.87 seconds to deallocate network for instance. 
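[editor's note] The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries throughout this log all follow the same wait-and-poll pattern: log the task handle, poll progress, then report "completed successfully" with a duration_secs. As a hedged sketch only (this is not the oslo.vmware wait_for_task code; get_task_info is a hypothetical stand-in for the PropertyCollector reads seen above), the loop looks roughly like:

    # Illustrative sketch of a task poll loop; not the oslo.vmware implementation.
    import time

    def wait_for_task(get_task_info, task_id, interval=0.5):
        """Poll get_task_info(task_id) until the task succeeds or errors."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 14}
            if info['state'] == 'success':
                return time.monotonic() - start  # analogous to duration_secs in the log
            if info['state'] == 'error':
                raise RuntimeError("task %s failed: %s" % (task_id, info.get('error')))
            print("Task: {'id': %r} progress is %s%%." % (task_id, info.get('progress', 0)))
            time.sleep(interval)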
[ 1847.906945] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.907459] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Instance network_info: |[{"id": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "address": "fa:16:3e:96:64:a6", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3537ab9-0a", "ovs_interfaceid": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1847.907906] env[62820]: DEBUG oslo_concurrency.lockutils [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] Acquired lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.908289] env[62820]: DEBUG nova.network.neutron [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Refreshing network info cache for port d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1847.909811] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:64:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ff3ecd2f-0b10-4faf-a512-fd7a20c28df1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3537ab9-0a82-437a-83c1-ffb18a60490a', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1847.927909] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Creating folder: Project (f214ed24ef014d32bfaea02a7174b912). Parent ref: group-v353379. 
{{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1847.933927] env[62820]: DEBUG nova.objects.instance [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'flavor' on Instance uuid a8803178-7fa3-42ea-824c-901063673062 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1847.936015] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-006cc3c7-d5f6-4bf4-bf0a-c7659c9fc7df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.953814] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Created folder: Project (f214ed24ef014d32bfaea02a7174b912) in parent group-v353379. [ 1847.953887] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Creating folder: Instances. Parent ref: group-v353675. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1847.954399] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c53b5107-e3bf-447e-96b1-14c3748868ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.966476] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Created folder: Instances in parent group-v353675. [ 1847.966744] env[62820]: DEBUG oslo.service.loopingcall [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1847.966940] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 901626d2-1788-4017-b0c7-52537618804c] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1847.967582] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-455e6c80-f17c-4b85-a99a-da05ed0ffbeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.989255] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1847.989255] env[62820]: value = "task-1696435" [ 1847.989255] env[62820]: _type = "Task" [ 1847.989255] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.001466] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696435, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.035748] env[62820]: DEBUG nova.compute.utils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1848.036294] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1848.036474] env[62820]: DEBUG nova.network.neutron [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1848.149967] env[62820]: DEBUG nova.policy [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88adfa69ba5c4168a8f9c7da59f72f6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf2b1ea01551466fb52c3770dc8f615f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1848.151658] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 860637a2-8c59-42af-a9f5-4e80c5466274] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1848.243505] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20047fef-4d1e-452a-b4dd-8a63de04ba1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.256634] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc473b8-804a-48a2-8423-2c379621e252 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.260157] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696432, 'name': Rename_Task, 'duration_secs': 0.177182} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.260454] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1848.261120] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da39f955-0ba6-4953-bf14-1259a0fa7e4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.298108] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.301107] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cd7311-216d-43b4-ae6c-a0c76d0ed562 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.304071] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1848.304071] env[62820]: value = "task-1696436" [ 1848.304071] env[62820]: _type = "Task" [ 1848.304071] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.316587] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81b578c-4a57-4a3b-9532-e2bc84fc55dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.324243] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696436, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.335469] env[62820]: DEBUG nova.compute.provider_tree [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.410743] env[62820]: DEBUG nova.network.neutron [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updated VIF entry in instance network info cache for port d3537ab9-0a82-437a-83c1-ffb18a60490a. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1848.411151] env[62820]: DEBUG nova.network.neutron [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [{"id": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "address": "fa:16:3e:96:64:a6", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3537ab9-0a", "ovs_interfaceid": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.427748] env[62820]: DEBUG nova.network.neutron [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Successfully updated port: 7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1848.441530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcd2b9c4-3b71-4d14-93af-3087098eb3f3 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.851s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.500959] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696435, 'name': CreateVM_Task, 'duration_secs': 0.402068} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.501138] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 901626d2-1788-4017-b0c7-52537618804c] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1848.501814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.501975] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.502313] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1848.502564] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8487749e-8011-4f48-8b48-6e27813ee0fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.507307] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1848.507307] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521ed160-da6f-137a-5b30-e8d8c6f99771" [ 1848.507307] env[62820]: _type = "Task" [ 1848.507307] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.517545] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521ed160-da6f-137a-5b30-e8d8c6f99771, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.539518] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1848.584972] env[62820]: DEBUG nova.network.neutron [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Successfully created port: 26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1848.655023] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 8a105764-ebd9-4c0a-b555-c5fd5ea8684d] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1848.817142] env[62820]: DEBUG oslo_vmware.api [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696436, 'name': PowerOnVM_Task, 'duration_secs': 0.468144} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.821024] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1848.821024] env[62820]: INFO nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Took 15.18 seconds to spawn the instance on the hypervisor. 
[ 1848.821024] env[62820]: DEBUG nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1848.821024] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2881b21d-1a81-4a00-a12c-e6cc3d52defe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.844704] env[62820]: DEBUG nova.scheduler.client.report [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1848.853058] env[62820]: DEBUG nova.compute.manager [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1848.915034] env[62820]: DEBUG oslo_concurrency.lockutils [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] Releasing lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.915347] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Received event network-vif-deleted-a3b6a7be-3800-4d75-9bf0-003542502fcb {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1848.915555] env[62820]: INFO nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Neutron deleted interface a3b6a7be-3800-4d75-9bf0-003542502fcb; detaching it from the instance and deleting it from the info cache [ 1848.915737] env[62820]: DEBUG nova.network.neutron [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.931118] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.931300] env[62820]: DEBUG oslo_concurrency.lockutils [None 
req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.931460] env[62820]: DEBUG nova.network.neutron [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1848.939412] env[62820]: DEBUG nova.compute.manager [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-vif-plugged-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1848.939578] env[62820]: DEBUG oslo_concurrency.lockutils [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.939818] env[62820]: DEBUG oslo_concurrency.lockutils [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.939990] env[62820]: DEBUG oslo_concurrency.lockutils [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.940174] env[62820]: DEBUG nova.compute.manager [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] No waiting events found dispatching network-vif-plugged-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1848.940368] env[62820]: WARNING nova.compute.manager [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received unexpected event network-vif-plugged-7f7affc8-f587-4484-9eef-211d6ea80226 for instance with vm_state building and task_state spawning. 
[ 1848.940523] env[62820]: DEBUG nova.compute.manager [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1848.940674] env[62820]: DEBUG nova.compute.manager [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing instance network info cache due to event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1848.940834] env[62820]: DEBUG oslo_concurrency.lockutils [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.018910] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521ed160-da6f-137a-5b30-e8d8c6f99771, 'name': SearchDatastore_Task, 'duration_secs': 0.00988} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.019265] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.019565] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1849.019811] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.019961] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.020179] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1849.020459] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab53f78b-cc71-4440-8b2e-7d78c310d732 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.039292] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1849.039491] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1849.040264] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1b020dc-99ae-4299-bbff-b5c7bdf0d146 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.049523] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1849.049523] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5288828a-fa87-f033-c501-f3334b20b8cb" [ 1849.049523] env[62820]: _type = "Task" [ 1849.049523] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.057407] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5288828a-fa87-f033-c501-f3334b20b8cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.159025] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0774673f-e7f2-46ce-b9ec-8fadb36ce192] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1849.334689] env[62820]: INFO nova.compute.manager [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Took 21.36 seconds to build instance. 
[ 1849.351284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.353505] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.710s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.354948] env[62820]: INFO nova.compute.claims [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1849.372345] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.373248] env[62820]: INFO nova.scheduler.client.report [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleted allocations for instance 3228cd34-2144-425a-aca6-400cb0991e43 [ 1849.418308] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-834a3f43-66b9-4ede-ba65-76b9e030b8a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.435668] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d34cd0-51b2-48d9-8ac4-bfc661dbaf52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.486148] env[62820]: DEBUG nova.compute.manager [req-573afee5-2050-43dd-9e65-2295ee593f46 req-5d779357-9b0a-46ce-bc4d-a99012e6c4e9 service nova] [instance: 03b0abc8-dd32-4cf9-8750-d64b8a66695e] Detach interface failed, port_id=a3b6a7be-3800-4d75-9bf0-003542502fcb, reason: Instance 03b0abc8-dd32-4cf9-8750-d64b8a66695e could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1849.497872] env[62820]: DEBUG nova.network.neutron [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1849.551042] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1849.563510] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5288828a-fa87-f033-c501-f3334b20b8cb, 'name': SearchDatastore_Task, 'duration_secs': 0.029961} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.564313] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f5c3dec-6003-44f6-a2ac-e47b27f66cb9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.569813] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1849.569813] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52acb961-8e79-2a58-e807-d9c14468e15a" [ 1849.569813] env[62820]: _type = "Task" [ 1849.569813] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.580591] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1849.580840] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1849.581012] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1849.581201] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1849.581353] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b 
tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1849.581500] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1849.581708] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1849.581887] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1849.582037] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1849.582210] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1849.582388] env[62820]: DEBUG nova.virt.hardware [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1849.583157] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157359fd-68c7-4e81-9b3b-94e60f9519f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.588560] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52acb961-8e79-2a58-e807-d9c14468e15a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.593872] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6397a69-f561-42dc-b184-359c93b6371f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.662254] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 53ba381a-9f81-4c37-8758-af56fc165dd7] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1849.680370] env[62820]: DEBUG nova.network.neutron [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.837028] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6053ba84-2fa5-4680-a54b-b084cd1fc0e6 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.873s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.880915] env[62820]: DEBUG oslo_concurrency.lockutils [None req-8f3a40c1-d602-44b5-826e-da82966054c8 tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "3228cd34-2144-425a-aca6-400cb0991e43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.813s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.084484] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52acb961-8e79-2a58-e807-d9c14468e15a, 'name': SearchDatastore_Task, 'duration_secs': 0.012688} completed 
successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.084754] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.085026] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 901626d2-1788-4017-b0c7-52537618804c/901626d2-1788-4017-b0c7-52537618804c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1850.085317] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c50de03e-5547-4a4e-9bf4-e4f74c4db5f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.094264] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1850.094264] env[62820]: value = "task-1696437" [ 1850.094264] env[62820]: _type = "Task" [ 1850.094264] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.100641] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696437, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.166502] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 29d0af9e-06f5-46e6-9dc4-9c90eb1b32f8] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1850.183780] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.184189] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Instance network_info: |[{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1850.184558] env[62820]: DEBUG oslo_concurrency.lockutils [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.184766] env[62820]: DEBUG nova.network.neutron [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1850.189021] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:6f:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7f7affc8-f587-4484-9eef-211d6ea80226', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1850.200608] env[62820]: DEBUG oslo.service.loopingcall [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.202580] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1850.202864] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ce8a8cb-1946-4a9d-9b3e-f3e8514b815b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.230095] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1850.230095] env[62820]: value = "task-1696438" [ 1850.230095] env[62820]: _type = "Task" [ 1850.230095] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.241959] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696438, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.496315] env[62820]: DEBUG nova.network.neutron [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Successfully updated port: 26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1850.606814] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474637} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.606814] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 901626d2-1788-4017-b0c7-52537618804c/901626d2-1788-4017-b0c7-52537618804c.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1850.606972] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1850.607260] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f29e90f-3e59-4dfd-9d06-75bbf47686ef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.613760] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1850.613760] env[62820]: value = "task-1696439" [ 1850.613760] env[62820]: _type = "Task" [ 1850.613760] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.615623] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6c9b0a-3d4e-4dc0-92dd-2d29f5f0a100 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.629249] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6d55da-da40-404f-a6eb-e91cb71267a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.634046] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696439, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.661616] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fded960-9ad7-47c8-b945-8ebef63882d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.669582] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1099ff-9b03-473b-bcbf-ffbf8e3fbcfc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.673998] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 78d9c7ad-af34-4e84-bd0c-d0bf287be0ea] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1850.685686] env[62820]: DEBUG nova.compute.provider_tree [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.744337] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696438, 'name': CreateVM_Task, 'duration_secs': 0.395338} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.744507] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1850.745395] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.745582] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.745899] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1850.746176] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07c14557-450f-434b-9e69-31227f314819 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.750759] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1850.750759] 
env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5239c8f0-4832-0218-4024-9614efa5686b" [ 1850.750759] env[62820]: _type = "Task" [ 1850.750759] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.758927] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239c8f0-4832-0218-4024-9614efa5686b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.960251] env[62820]: DEBUG nova.network.neutron [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updated VIF entry in instance network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1850.960703] env[62820]: DEBUG nova.network.neutron [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.970473] env[62820]: DEBUG nova.compute.manager [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Received event network-vif-plugged-26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1850.970473] env[62820]: DEBUG oslo_concurrency.lockutils [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] Acquiring lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.970785] env[62820]: DEBUG oslo_concurrency.lockutils [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service 
nova] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.970967] env[62820]: DEBUG oslo_concurrency.lockutils [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.971158] env[62820]: DEBUG nova.compute.manager [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] No waiting events found dispatching network-vif-plugged-26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1850.971768] env[62820]: WARNING nova.compute.manager [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Received unexpected event network-vif-plugged-26b4189b-0a18-4e7c-b07b-c63278a422e5 for instance with vm_state building and task_state spawning. [ 1850.971768] env[62820]: DEBUG nova.compute.manager [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Received event network-changed-26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1850.971768] env[62820]: DEBUG nova.compute.manager [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Refreshing instance network info cache due to event network-changed-26b4189b-0a18-4e7c-b07b-c63278a422e5. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1850.971914] env[62820]: DEBUG oslo_concurrency.lockutils [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] Acquiring lock "refresh_cache-d519c4e7-0d47-4643-8c31-acb2f6ee38b2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.972053] env[62820]: DEBUG oslo_concurrency.lockutils [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] Acquired lock "refresh_cache-d519c4e7-0d47-4643-8c31-acb2f6ee38b2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.972207] env[62820]: DEBUG nova.network.neutron [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Refreshing network info cache for port 26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1851.000682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "refresh_cache-d519c4e7-0d47-4643-8c31-acb2f6ee38b2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.126596] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075149} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.126596] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1851.127218] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44284690-ce69-4841-b0dc-956bc9bbeb03 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.148494] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 901626d2-1788-4017-b0c7-52537618804c/901626d2-1788-4017-b0c7-52537618804c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1851.148759] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36f00305-0adb-4344-9c08-066d371e66f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.168285] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1851.168285] env[62820]: value = "task-1696441" [ 1851.168285] env[62820]: _type = "Task" [ 1851.168285] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.176352] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696441, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.176753] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 10f4cf46-89d2-4ac4-91d5-6626212f4f8e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1851.188835] env[62820]: DEBUG nova.scheduler.client.report [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1851.263010] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5239c8f0-4832-0218-4024-9614efa5686b, 'name': SearchDatastore_Task, 'duration_secs': 0.008388} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.263010] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.263302] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1851.263487] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.263651] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.263854] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1851.264202] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5df6cc9-796f-4881-806b-6e1e19fa7284 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.273741] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1851.274019] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1851.275014] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e74eb4b-b412-4f4d-9b0b-fb741d4dd6c9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.282071] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1851.282071] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5243496b-3e7d-eba0-1716-22a7f25b2bb2" [ 1851.282071] env[62820]: _type = "Task" [ 1851.282071] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.289120] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5243496b-3e7d-eba0-1716-22a7f25b2bb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.463336] env[62820]: DEBUG oslo_concurrency.lockutils [req-bac6a9d6-91c7-43e9-ae61-73db02d8748f req-8bcbd9e7-d74d-47a6-85e3-60fc3fd35b33 service nova] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.480521] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.480854] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.481155] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.481368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.481573] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.484178] env[62820]: INFO nova.compute.manager [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Terminating instance [ 1851.512561] env[62820]: DEBUG nova.network.neutron [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1851.585307] env[62820]: DEBUG nova.network.neutron [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.678304] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696441, 'name': ReconfigVM_Task, 'duration_secs': 0.410621} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.678633] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 901626d2-1788-4017-b0c7-52537618804c/901626d2-1788-4017-b0c7-52537618804c.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1851.679341] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 5fbb6021-ca7d-4cce-90c9-113b7d833d49] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1851.681041] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40d9173b-e762-434c-9d17-b6e60c379575 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.689166] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1851.689166] env[62820]: value = "task-1696442" [ 1851.689166] env[62820]: _type = "Task" [ 1851.689166] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.693668] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.694080] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1851.701976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.956s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.701976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.704044] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.406s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.704044] env[62820]: DEBUG nova.objects.instance [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'resources' on Instance uuid 03b0abc8-dd32-4cf9-8750-d64b8a66695e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.704859] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696442, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.738056] env[62820]: INFO nova.scheduler.client.report [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted allocations for instance b89d32f8-0675-4b0c-977e-b7900e62bdd8 [ 1851.792907] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5243496b-3e7d-eba0-1716-22a7f25b2bb2, 'name': SearchDatastore_Task, 'duration_secs': 0.015806} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.794019] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0c4c998-4b93-474f-ae15-7460f05461dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.799638] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1851.799638] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5211ad44-e0ef-a41c-b6e1-662bff7149be" [ 1851.799638] env[62820]: _type = "Task" [ 1851.799638] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.808211] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5211ad44-e0ef-a41c-b6e1-662bff7149be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.989531] env[62820]: DEBUG nova.compute.manager [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1851.989881] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.990845] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87519c41-04d6-4e8b-8005-69a6699770fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.998946] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.999245] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f7e123d-c2bd-4235-a8ff-96214507e538 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.007192] env[62820]: DEBUG oslo_vmware.api [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1852.007192] env[62820]: value = "task-1696443" [ 1852.007192] env[62820]: _type = "Task" [ 1852.007192] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.014820] env[62820]: DEBUG oslo_vmware.api [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696443, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.087888] env[62820]: DEBUG oslo_concurrency.lockutils [req-4ad35e6f-d220-43ef-8c0d-70a865712a88 req-c0313934-3dca-4df3-a384-0ac3874f45e0 service nova] Releasing lock "refresh_cache-d519c4e7-0d47-4643-8c31-acb2f6ee38b2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.088309] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquired lock "refresh_cache-d519c4e7-0d47-4643-8c31-acb2f6ee38b2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.088478] env[62820]: DEBUG nova.network.neutron [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1852.183686] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 67f0b65c-9b1e-4c87-aa2e-8f8bae3938ff] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1852.200565] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696442, 'name': Rename_Task, 'duration_secs': 0.157069} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.200852] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1852.201137] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85ae2f0a-0c78-42cb-8c02-9d2122c4d228 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.204252] env[62820]: DEBUG nova.compute.utils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1852.205589] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1852.205768] env[62820]: DEBUG nova.network.neutron [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1852.212692] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1852.212692] env[62820]: value = "task-1696444" [ 1852.212692] env[62820]: _type = "Task" [ 1852.212692] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.221985] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.246476] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b96cbcd4-5ba1-4398-b775-64709aa54fa7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "b89d32f8-0675-4b0c-977e-b7900e62bdd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.465s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.250861] env[62820]: DEBUG nova.policy [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a11a3b5fb67a49ceb7bceb2770021fcf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2ccee293cde400f927db43f421cd50d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1852.311708] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5211ad44-e0ef-a41c-b6e1-662bff7149be, 'name': SearchDatastore_Task, 'duration_secs': 0.011565} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.314245] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.314512] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1852.315221] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a0bdb42-0cc5-4b97-976d-274add0b60dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.322877] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1852.322877] env[62820]: value = "task-1696445" [ 1852.322877] env[62820]: _type = "Task" [ 1852.322877] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.333599] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696445, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.390407] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f033f030-945d-4735-9f5f-e63804d3f572 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.399400] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635cd5cf-3b5d-49eb-ae2c-87de74a627f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.431313] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033ddb40-3b98-426c-8879-b8c71a82200c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.438962] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa555fc-9dbc-4f47-bc90-7a8175ac30ed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.453034] env[62820]: DEBUG nova.compute.provider_tree [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1852.517976] env[62820]: DEBUG oslo_vmware.api [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696443, 'name': PowerOffVM_Task, 'duration_secs': 0.21013} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.518486] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1852.518664] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1852.518928] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87d0717e-75e1-4b52-b8e7-3f26ffc3394b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.621355] env[62820]: DEBUG nova.network.neutron [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Successfully created port: ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1852.654880] env[62820]: DEBUG nova.network.neutron [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1852.663990] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1852.664183] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1852.665836] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] 3bff732c-9d4f-4dfa-8058-42c4dbde2efe {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.665836] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-905705f7-5853-4bbd-9bd0-34fd4f50b164 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.673240] env[62820]: DEBUG oslo_vmware.api [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1852.673240] env[62820]: value = "task-1696447" [ 1852.673240] env[62820]: _type = "Task" [ 1852.673240] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.681823] env[62820]: DEBUG oslo_vmware.api [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696447, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.686495] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 46434419-d6de-4cc1-905c-14698512b7a5] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1852.709877] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1852.731897] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696444, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.839200] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696445, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.956052] env[62820]: DEBUG nova.scheduler.client.report [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1852.997645] env[62820]: DEBUG nova.network.neutron [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Updating instance_info_cache with network_info: [{"id": "26b4189b-0a18-4e7c-b07b-c63278a422e5", "address": "fa:16:3e:60:3e:92", "network": {"id": "af3f4540-ad3b-4f00-87a1-534882d6e4a6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-781108545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"cf2b1ea01551466fb52c3770dc8f615f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b4189b-0a", "ovs_interfaceid": "26b4189b-0a18-4e7c-b07b-c63278a422e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.182043] env[62820]: DEBUG oslo_vmware.api [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.456133} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.182161] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.182284] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1853.182465] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1853.182635] env[62820]: INFO nova.compute.manager [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1853.182865] env[62820]: DEBUG oslo.service.loopingcall [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.183062] env[62820]: DEBUG nova.compute.manager [-] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1853.183161] env[62820]: DEBUG nova.network.neutron [-] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1853.193869] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0d519bc8-3cc1-429e-b41b-ed0035622562] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1853.231342] env[62820]: DEBUG oslo_vmware.api [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696444, 'name': PowerOnVM_Task, 'duration_secs': 0.764624} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.231605] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1853.231843] env[62820]: INFO nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Took 8.49 seconds to spawn the instance on the hypervisor. [ 1853.231977] env[62820]: DEBUG nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1853.232758] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae07d833-43aa-4075-bc00-19a3f04477d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.337680] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555353} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.337804] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1853.338065] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1853.338373] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26abfb6b-0352-45b4-8471-8d9efa77db15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.346393] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1853.346393] env[62820]: value = "task-1696448" [ 1853.346393] env[62820]: _type = "Task" [ 1853.346393] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.354095] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696448, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.422936] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "41666e62-526d-4553-a005-07cbc2321d0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.423190] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.450816] env[62820]: DEBUG nova.compute.manager [req-01553a2e-f916-4cd5-8faf-e754586573fa req-2f58bbdb-364e-48fd-9125-a2db8eb2da59 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Received event network-vif-deleted-c61b3713-021f-484e-a5bf-16202f289715 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1853.451016] env[62820]: INFO nova.compute.manager [req-01553a2e-f916-4cd5-8faf-e754586573fa req-2f58bbdb-364e-48fd-9125-a2db8eb2da59 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Neutron deleted interface c61b3713-021f-484e-a5bf-16202f289715; detaching it from the instance and deleting it from the info cache [ 1853.451189] env[62820]: DEBUG nova.network.neutron [req-01553a2e-f916-4cd5-8faf-e754586573fa req-2f58bbdb-364e-48fd-9125-a2db8eb2da59 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.461083] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.463420] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.091s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.480222] env[62820]: INFO nova.scheduler.client.report [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted allocations for instance 03b0abc8-dd32-4cf9-8750-d64b8a66695e [ 1853.500730] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Releasing lock "refresh_cache-d519c4e7-0d47-4643-8c31-acb2f6ee38b2" {{(pid=62820) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.501051] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Instance network_info: |[{"id": "26b4189b-0a18-4e7c-b07b-c63278a422e5", "address": "fa:16:3e:60:3e:92", "network": {"id": "af3f4540-ad3b-4f00-87a1-534882d6e4a6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-781108545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf2b1ea01551466fb52c3770dc8f615f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b4189b-0a", "ovs_interfaceid": "26b4189b-0a18-4e7c-b07b-c63278a422e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1853.501452] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:3e:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26b4189b-0a18-4e7c-b07b-c63278a422e5', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1853.509131] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Creating folder: Project (cf2b1ea01551466fb52c3770dc8f615f). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1853.509428] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03e35c2e-6844-4698-8742-ba71d28d8b03 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.520460] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Created folder: Project (cf2b1ea01551466fb52c3770dc8f615f) in parent group-v353379. [ 1853.520643] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Creating folder: Instances. 
Parent ref: group-v353679. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1853.520860] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07f24a81-d7ae-462f-84ed-74bfdd400f42 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.529600] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Created folder: Instances in parent group-v353679. [ 1853.529817] env[62820]: DEBUG oslo.service.loopingcall [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.529991] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1853.530190] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1aa18f38-c8bb-4727-aa83-0c0e54f64490 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.550023] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1853.550023] env[62820]: value = "task-1696451" [ 1853.550023] env[62820]: _type = "Task" [ 1853.550023] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.556568] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696451, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.696413] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9a1b9c99-57ef-4c16-97ca-739917c6c3d7] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1853.727267] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1853.750696] env[62820]: INFO nova.compute.manager [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Took 14.21 seconds to build instance. 
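The task records throughout this section follow one pattern: a vCenter task is submitted (CreateFolder, CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...), then polled until it reaches a terminal state, with intermediate "progress is N%" lines and a final "completed successfully" line carrying duration_secs. The log shows Nova delegating this to oslo.vmware (wait_for_task / _poll_task in oslo_vmware/api.py); the sketch below is only an illustrative re-creation of that polling loop, with a hypothetical get_task_info() callable standing in for the PropertyCollector reads the real library performs.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Minimal sketch of the poll loop behind the wait_for_task/_poll_task
        records in this log (illustrative only, not the oslo.vmware code).

        get_task_info is an assumed callable returning an object with
        .state ('queued', 'running', 'success' or 'error'), .progress (int)
        and .error; it stands in for the PropertyCollector reads that
        oslo.vmware performs against the vCenter Task managed object.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                # corresponds to the "... completed successfully" records
                return info
            if info.state == 'error':
                # the real driver translates this into a driver-level exception
                raise RuntimeError(info.error)
            # corresponds to the "Task: {...} progress is N%" DEBUG records
            print('progress is %d%%' % info.progress)
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

Each "Waiting for the task: (returnval){ value = "task-NNNN" ... } to complete" record marks one entry into a loop of this kind, and the duration_secs reported on completion is roughly the wall-clock time spent polling.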
[ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1853.759726] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1853.760101] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1853.760101] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 
tempest-ServerRescueTestJSON-512574843-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1853.760405] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1853.760461] env[62820]: DEBUG nova.virt.hardware [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1853.761745] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875ee717-0e0d-4873-b94d-1eb74956f419 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.770550] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abac86a-90a4-4642-a0ac-31fd9f4257c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.855985] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160279} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.858027] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1853.858027] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00352d50-d6b7-4dce-ad44-059f0a719663 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.880420] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1853.880705] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dc63eeb-5233-4871-8fb3-2e612fd6e904 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.905434] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1853.905434] env[62820]: value = 
"task-1696452" [ 1853.905434] env[62820]: _type = "Task" [ 1853.905434] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.915324] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.925571] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1853.930855] env[62820]: DEBUG nova.network.neutron [-] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.953957] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe33749b-b162-4527-8ac1-cdc166a090d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.963347] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3c2fef-7aad-4ab9-811b-9f42dc945e23 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.977114] env[62820]: INFO nova.compute.claims [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1853.988986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b81d1456-20b8-4c3f-ae12-73769f808f49 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "03b0abc8-dd32-4cf9-8750-d64b8a66695e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.752s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.999667] env[62820]: DEBUG nova.compute.manager [req-01553a2e-f916-4cd5-8faf-e754586573fa req-2f58bbdb-364e-48fd-9125-a2db8eb2da59 service nova] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Detach interface failed, port_id=c61b3713-021f-484e-a5bf-16202f289715, reason: Instance 3bff732c-9d4f-4dfa-8058-42c4dbde2efe could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1854.060495] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696451, 'name': CreateVM_Task, 'duration_secs': 0.397267} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.060495] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1854.061187] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.061362] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.061829] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1854.061965] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f76450-fd93-4177-8306-2ed3b9ccba9f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.066661] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1854.066661] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5210f2fe-545d-2b72-6341-29782aa574b5" [ 1854.066661] env[62820]: _type = "Task" [ 1854.066661] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.077176] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5210f2fe-545d-2b72-6341-29782aa574b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.199664] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 361b7da3-0e8c-4291-aba0-8b6116b8032f] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1854.252343] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b46c6c50-cdba-49a4-b935-738fcb2a12d7 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.728s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.417077] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696452, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.439022] env[62820]: INFO nova.compute.manager [-] [instance: 3bff732c-9d4f-4dfa-8058-42c4dbde2efe] Took 1.25 seconds to deallocate network for instance. [ 1854.459265] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.482899] env[62820]: INFO nova.compute.resource_tracker [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating resource usage from migration 8e1629bd-9bfc-40aa-9009-3ac711cb7167 [ 1854.578570] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5210f2fe-545d-2b72-6341-29782aa574b5, 'name': SearchDatastore_Task, 'duration_secs': 0.01032} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.581181] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.581461] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1854.581702] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.581848] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.582119] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1854.582575] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73707da2-543f-493c-9c8c-b8187fec0b32 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.612527] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1854.612527] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1854.612936] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-056b1840-1392-42f1-9ad4-a3407544a418 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.618580] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1854.618580] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520e9ad7-2f18-ed68-4c1c-e0f5c3a3d897" [ 1854.618580] env[62820]: _type = "Task" [ 1854.618580] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.631182] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520e9ad7-2f18-ed68-4c1c-e0f5c3a3d897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.684375] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb792edd-d05d-4525-9dce-3f6031f853bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.691901] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5087f676-4ff3-4970-859e-55036863a765 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.696683] env[62820]: DEBUG nova.network.neutron [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Successfully updated port: ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1854.726533] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: c0d14c00-2c93-490c-8b17-91d3b5ee5b3d] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1854.729543] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aee2857-7cc8-43cb-b87b-f60fb005c05d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.742046] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2f96f2-b693-4607-bba8-3e42eaf18fb8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.760160] env[62820]: DEBUG nova.compute.provider_tree [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.919126] env[62820]: DEBUG 
oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696452, 'name': ReconfigVM_Task, 'duration_secs': 0.597563} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.919483] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1854.920235] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4335b71e-4ce3-451f-b716-45e9538e65fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.927804] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1854.927804] env[62820]: value = "task-1696453" [ 1854.927804] env[62820]: _type = "Task" [ 1854.927804] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.936512] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696453, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.952044] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.129635] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520e9ad7-2f18-ed68-4c1c-e0f5c3a3d897, 'name': SearchDatastore_Task, 'duration_secs': 0.018825} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.130639] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d07ac0-f1d7-442f-acea-81cee97a9a77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.136119] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1855.136119] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5287b5f6-7c0e-a441-bc97-8ca1aebcf761" [ 1855.136119] env[62820]: _type = "Task" [ 1855.136119] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.144042] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5287b5f6-7c0e-a441-bc97-8ca1aebcf761, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.201722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.201722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.201722] env[62820]: DEBUG nova.network.neutron [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1855.237137] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: e420644c-cfcc-4f8c-ae03-c9ebef585690] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1855.263451] env[62820]: DEBUG nova.scheduler.client.report [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 
1855.359952] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.360208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.360449] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.360657] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.360803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.365194] env[62820]: INFO nova.compute.manager [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Terminating instance [ 1855.438093] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696453, 'name': Rename_Task, 'duration_secs': 0.14482} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.438406] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1855.438673] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0580ced1-b42a-45a2-9122-f885ade067be {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.445567] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1855.445567] env[62820]: value = "task-1696454" [ 1855.445567] env[62820]: _type = "Task" [ 1855.445567] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.453659] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.478533] env[62820]: DEBUG nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Received event network-vif-plugged-ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1855.478763] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Acquiring lock "c15bbb69-84a0-4fda-a509-66218b9c9f70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.478988] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.479177] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.479436] env[62820]: DEBUG nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] No waiting events found dispatching network-vif-plugged-ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1855.479610] env[62820]: 
WARNING nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Received unexpected event network-vif-plugged-ad74c59c-92d9-43b7-8a73-b480a40ae561 for instance with vm_state building and task_state spawning. [ 1855.479775] env[62820]: DEBUG nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Received event network-changed-ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1855.479940] env[62820]: DEBUG nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Refreshing instance network info cache due to event network-changed-ad74c59c-92d9-43b7-8a73-b480a40ae561. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1855.480124] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Acquiring lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.649017] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5287b5f6-7c0e-a441-bc97-8ca1aebcf761, 'name': SearchDatastore_Task, 'duration_secs': 0.03761} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.649341] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.649652] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] d519c4e7-0d47-4643-8c31-acb2f6ee38b2/d519c4e7-0d47-4643-8c31-acb2f6ee38b2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1855.649894] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f979100d-eebe-4167-9364-c344e0a2303f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.657934] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1855.657934] env[62820]: value = "task-1696455" [ 1855.657934] env[62820]: _type = "Task" [ 1855.657934] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.667232] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.733251] env[62820]: DEBUG nova.network.neutron [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1855.741282] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: ba5b0055-b756-4f80-ba6b-7e8b705d2970] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1855.769308] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.306s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.769589] env[62820]: INFO nova.compute.manager [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Migrating [ 1855.781232] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.322s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.782904] env[62820]: INFO nova.compute.claims [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1855.868816] env[62820]: DEBUG nova.compute.manager [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1855.869273] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1855.870182] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb2b21f-b84c-4be8-9531-41b9389deecf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.879787] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1855.880077] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ea509e7-9583-4437-8008-4d872f03898f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.887262] env[62820]: DEBUG oslo_vmware.api [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1855.887262] env[62820]: value = "task-1696456" [ 1855.887262] env[62820]: _type = "Task" [ 1855.887262] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.896875] env[62820]: DEBUG oslo_vmware.api [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696456, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.954764] env[62820]: DEBUG nova.network.neutron [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Updating instance_info_cache with network_info: [{"id": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "address": "fa:16:3e:70:13:d8", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad74c59c-92", "ovs_interfaceid": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.960075] env[62820]: DEBUG oslo_vmware.api [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696454, 'name': PowerOnVM_Task, 'duration_secs': 0.482496} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.960721] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1855.960977] env[62820]: INFO nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Took 8.86 seconds to spawn the instance on the hypervisor. 
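The records above trace the tail end of two spawns: for 2aeeb809-0b27-411b-b632-ef4d61b295df the root disk is attached via ReconfigVM_Task, the VM is renamed and powered on, while for d519c4e7-0d47-4643-8c31-acb2f6ee38b2 the cached image VMDK is copied from devstack-image-cache_base into the instance directory. The sketch below only shows how the two datastore paths seen in those records can be assembled from the image id and the instance uuid; the helper names are illustrative, not Nova's.

    def ds_path(datastore, *parts):
        """Render a vSphere-style datastore path such as '[datastore1] dir/file.vmdk'."""
        return f"[{datastore}] " + "/".join(parts)

    def image_cache_vmdk(datastore, image_id, cache_dir="devstack-image-cache_base"):
        # Source of the copy: [datastore1] devstack-image-cache_base/<image>/<image>.vmdk
        return ds_path(datastore, cache_dir, image_id, f"{image_id}.vmdk")

    def instance_root_vmdk(datastore, instance_uuid):
        # Destination of the copy: [datastore1] <uuid>/<uuid>.vmdk
        return ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")

    src = image_cache_vmdk("datastore1", "b17619ac-779a-4463-ab94-4bb0b9ba63c1")
    dst = instance_root_vmdk("datastore1", "d519c4e7-0d47-4643-8c31-acb2f6ee38b2")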
[ 1855.961275] env[62820]: DEBUG nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1855.962291] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a09360b-5c49-4211-bfce-5647b6d4e22d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.168539] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696455, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.217381] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.217671] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.244778] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 2587a273-0115-483a-ba5e-994c87bbc4d0] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1856.295014] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.295197] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.295370] env[62820]: DEBUG nova.network.neutron [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1856.396980] env[62820]: DEBUG oslo_vmware.api [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 
tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696456, 'name': PowerOffVM_Task, 'duration_secs': 0.332699} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.397348] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1856.397531] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1856.397779] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b4af59a-d8b7-4231-ba4e-459dedf89788 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.462128] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.462354] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Instance network_info: |[{"id": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "address": "fa:16:3e:70:13:d8", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad74c59c-92", "ovs_interfaceid": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1856.462691] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Acquired lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.462871] env[62820]: DEBUG nova.network.neutron 
[req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Refreshing network info cache for port ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1856.464154] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:13:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30c39e9a-a798-4f25-a48c-91f786ba332c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad74c59c-92d9-43b7-8a73-b480a40ae561', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1856.471598] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Creating folder: Project (e2ccee293cde400f927db43f421cd50d). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1856.472747] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9be235b6-f2e6-443c-9967-6087f19362e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.484635] env[62820]: INFO nova.compute.manager [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Took 14.40 seconds to build instance. 
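The two records above show the driver reducing the Neutron network_info entry for port ad74c59c-92d9-43b7-8a73-b480a40ae561 to the compact VIF info list used to build the VM (network name, MAC address, NSX network_ref, iface_id, vif_model). Below is a simplified sketch of that mapping, assuming an OVS port bound to an NSX logical switch as in the logged example; it is not the driver's actual function.

    def vif_info_from_network_info(vif, vif_model="vmxnet3"):
        """Reduce one Neutron network_info entry to the 'VIF info' dict seen above.

        Assumes an OVS port on an NSX logical switch, as in the logged example;
        the real driver handles more VIF and network types.
        """
        details = vif.get("details", {})
        return {
            "network_name": vif["network"]["bridge"],       # e.g. 'br-int'
            "mac_address": vif["address"],                   # e.g. 'fa:16:3e:70:13:d8'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],                           # Neutron port id
            "vif_model": vif_model,
        }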
[ 1856.487107] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.487107] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.487107] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleting the datastore file [datastore1] 0dd0e112-7a7c-4b37-8938-bb98aab2d485 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.487107] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f24bd34-4525-4bce-ba1a-7ff7be52cdf8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.493361] env[62820]: DEBUG oslo_vmware.api [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1856.493361] env[62820]: value = "task-1696459" [ 1856.493361] env[62820]: _type = "Task" [ 1856.493361] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.494618] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Created folder: Project (e2ccee293cde400f927db43f421cd50d) in parent group-v353379. [ 1856.494799] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Creating folder: Instances. Parent ref: group-v353682. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1856.498316] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48b52c2a-7fcf-42b3-a002-698f572c2cfb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.505359] env[62820]: DEBUG oslo_vmware.api [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.507049] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Created folder: Instances in parent group-v353682. 
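Lock records of the form "acquired ... waited Xs" and "released ... held Ys" run throughout this section (for example the compute_resources resize claim held 2.306s and the terminate_instance lock held 9.752s above). The stdlib sketch below reproduces that waited/held bookkeeping around an ordinary threading.Lock; it illustrates the pattern only and is not the oslo_concurrency.lockutils implementation.

    import threading
    import time

    class TimedLock:
        """Context manager that reports how long it waited for and held a lock."""

        def __init__(self, name):
            self.name = name
            self._lock = threading.Lock()

        def __enter__(self):
            start = time.monotonic()
            self._lock.acquire()
            self._acquired_at = time.monotonic()
            print(f'Lock "{self.name}" acquired :: waited {self._acquired_at - start:.3f}s')
            return self

        def __exit__(self, exc_type, exc, tb):
            held = time.monotonic() - self._acquired_at
            self._lock.release()
            print(f'Lock "{self.name}" released :: held {held:.3f}s')
            return False

    # Usage: with TimedLock("compute_resources"): ... critical section ...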
[ 1856.507307] env[62820]: DEBUG oslo.service.loopingcall [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.507502] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1856.507710] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d11afd4a-fe56-4646-bff9-8c9625d9784c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.527097] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1856.527097] env[62820]: value = "task-1696461" [ 1856.527097] env[62820]: _type = "Task" [ 1856.527097] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.534882] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696461, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.670361] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715732} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.670644] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] d519c4e7-0d47-4643-8c31-acb2f6ee38b2/d519c4e7-0d47-4643-8c31-acb2f6ee38b2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1856.670859] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1856.671150] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0f3781d-a5ad-48e5-9c9f-b9809137bc1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.678175] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1856.678175] env[62820]: value = "task-1696462" [ 1856.678175] env[62820]: _type = "Task" [ 1856.678175] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.686405] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.720249] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1856.748918] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4ae63ae5-0306-4540-be88-6e7d909c38a3] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1856.989783] env[62820]: DEBUG oslo_concurrency.lockutils [None req-16beff00-246f-4e1d-8034-5878d1e4851c tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.919s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.003579] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57c7ccc-2b78-4771-bf9a-0fcfc5d50cf4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.011592] env[62820]: DEBUG oslo_vmware.api [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.478859} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.013627] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1857.013846] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1857.014051] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1857.014239] env[62820]: INFO nova.compute.manager [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1857.014493] env[62820]: DEBUG oslo.service.loopingcall [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.014771] env[62820]: DEBUG nova.compute.manager [-] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1857.014883] env[62820]: DEBUG nova.network.neutron [-] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1857.018154] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e56a892-3165-4459-8f13-ec3db235224b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.721721] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 3a325dbf-87fb-4f7e-a665-e5d181333a5c] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1857.739951] env[62820]: DEBUG nova.compute.manager [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1857.739951] env[62820]: DEBUG nova.compute.manager [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing instance network info cache due to event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1857.739951] env[62820]: DEBUG oslo_concurrency.lockutils [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.739951] env[62820]: DEBUG oslo_concurrency.lockutils [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.739951] env[62820]: DEBUG nova.network.neutron [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1857.740604] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc43f829-3c2d-429c-9f72-505407f681f6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.751872] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696461, 'name': CreateVM_Task, 'duration_secs': 0.49333} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.754551] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1857.755488] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094079} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.756366] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.756990] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.757180] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.757489] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1857.758685] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ef47ed-2d4a-490c-9f31-8ea83811b195 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.762690] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1857.763149] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1db7acf2-b0db-4128-9239-7e9a794c1f1a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.765274] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9aff3a-1046-4421-be03-7486b066bf29 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.771182] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1857.771182] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52fd6562-cdd6-ba41-b81c-2df4228d32fe" [ 1857.771182] env[62820]: _type = "Task" [ 1857.771182] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.795666] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] d519c4e7-0d47-4643-8c31-acb2f6ee38b2/d519c4e7-0d47-4643-8c31-acb2f6ee38b2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1857.796432] env[62820]: DEBUG nova.compute.provider_tree [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1857.801388] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-940f6204-64aa-4270-8666-3f1c4b75c2eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.818058] env[62820]: DEBUG nova.scheduler.client.report [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1857.826956] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fd6562-cdd6-ba41-b81c-2df4228d32fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.828442] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1857.828442] env[62820]: value = "task-1696463" [ 1857.828442] env[62820]: _type = "Task" [ 1857.828442] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.837438] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696463, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.206807] env[62820]: DEBUG nova.network.neutron [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.228431] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b7806d81-eb2d-4724-8c40-ed88c8c77870] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1858.299662] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52fd6562-cdd6-ba41-b81c-2df4228d32fe, 'name': SearchDatastore_Task, 'duration_secs': 0.072473} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.300715] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.300715] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1858.300715] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.301341] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.301341] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1858.301341] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f05e45eb-624f-4c16-aa47-6651077e7013 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.310189] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1858.310385] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1858.311083] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aad5e7b-1745-457b-97f5-20c4dd8335fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.316438] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1858.316438] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c4ff94-765d-4ff6-fbb1-f31e9be6ecc8" [ 1858.316438] env[62820]: _type = "Task" [ 1858.316438] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.321837] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.322333] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1858.328331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.377s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.328430] env[62820]: DEBUG nova.objects.instance [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid 3bff732c-9d4f-4dfa-8058-42c4dbde2efe {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1858.329369] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c4ff94-765d-4ff6-fbb1-f31e9be6ecc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.339997] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696463, 'name': ReconfigVM_Task, 'duration_secs': 0.289211} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.340759] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Reconfigured VM instance instance-00000070 to attach disk [datastore1] d519c4e7-0d47-4643-8c31-acb2f6ee38b2/d519c4e7-0d47-4643-8c31-acb2f6ee38b2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1858.345499] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dfce86a-c4e0-4ddb-94e4-25d76eb49c12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.350311] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1858.350311] env[62820]: value = "task-1696464" [ 1858.350311] env[62820]: _type = "Task" [ 1858.350311] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.356048] env[62820]: DEBUG nova.network.neutron [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Updated VIF entry in instance network info cache for port ad74c59c-92d9-43b7-8a73-b480a40ae561. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1858.356740] env[62820]: DEBUG nova.network.neutron [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Updating instance_info_cache with network_info: [{"id": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "address": "fa:16:3e:70:13:d8", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad74c59c-92", "ovs_interfaceid": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.360931] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696464, 'name': Rename_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.598935] env[62820]: DEBUG nova.network.neutron [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updated VIF entry in instance network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1858.598935] env[62820]: DEBUG nova.network.neutron [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.710536] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.730900] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 93e1a842-d598-4798-88ad-622ae5dbf057] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1858.827709] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c4ff94-765d-4ff6-fbb1-f31e9be6ecc8, 'name': SearchDatastore_Task, 'duration_secs': 0.009133} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.828506] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8f60504-0efd-4f35-b233-996ebe2beec3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.831763] env[62820]: DEBUG nova.compute.utils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1858.833076] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1858.833181] env[62820]: DEBUG nova.network.neutron [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1858.840219] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1858.840219] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527dea58-2dd2-d9c2-bcc7-bc4172e646b7" [ 1858.840219] env[62820]: _type = "Task" [ 1858.840219] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.849202] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527dea58-2dd2-d9c2-bcc7-bc4172e646b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.859258] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696464, 'name': Rename_Task, 'duration_secs': 0.157926} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.861788] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1858.862260] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Releasing lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.862490] env[62820]: DEBUG nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Received event network-changed-d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1858.862652] env[62820]: DEBUG nova.compute.manager [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Refreshing instance network info cache due to event network-changed-d3537ab9-0a82-437a-83c1-ffb18a60490a. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1858.862861] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Acquiring lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.863056] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Acquired lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.863361] env[62820]: DEBUG nova.network.neutron [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Refreshing network info cache for port d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1858.864826] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a817a7bf-1f78-4c90-ac9e-900cc5cb5666 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.872751] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1858.872751] env[62820]: value = "task-1696465" [ 1858.872751] env[62820]: _type = "Task" [ 1858.872751] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.877944] env[62820]: DEBUG nova.policy [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18246bae0222415c96ec5b252cf5bd6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57b0c64a8704e7aaeba4011866c7a24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1858.884794] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696465, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.941899] env[62820]: DEBUG nova.network.neutron [-] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.064311] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba03c1d-166d-42a3-8de6-8e311cf59a85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.073921] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eeebe8f-04f3-4c75-b1a0-c5580795a99a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.104992] env[62820]: DEBUG oslo_concurrency.lockutils [req-a4a720ad-3c9d-44f3-81e0-cf310cac0276 req-8d57217a-f2c0-4601-ae22-1ed8f0a8cf29 service nova] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.106070] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df612e4-c08e-4aa9-83c5-1c51183a393d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.113960] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf172e6c-ad2b-4472-80a2-e48d7aecec8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.127453] env[62820]: DEBUG nova.compute.provider_tree [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.225327] env[62820]: DEBUG nova.network.neutron [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Successfully created port: 
02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1859.234459] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 11843b38-3ce4-42a7-b855-a9d0b473e796] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1859.338193] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1859.351522] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527dea58-2dd2-d9c2-bcc7-bc4172e646b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010643} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.351804] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.352086] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/c15bbb69-84a0-4fda-a509-66218b9c9f70.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1859.352352] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9207903-660e-4e8e-9653-1423e4d53e82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.359109] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1859.359109] env[62820]: value = "task-1696466" [ 1859.359109] env[62820]: _type = "Task" [ 1859.359109] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.368640] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696466, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.381626] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696465, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.447562] env[62820]: INFO nova.compute.manager [-] [instance: 0dd0e112-7a7c-4b37-8938-bb98aab2d485] Took 2.43 seconds to deallocate network for instance. [ 1859.630953] env[62820]: DEBUG nova.scheduler.client.report [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1859.721289] env[62820]: DEBUG nova.network.neutron [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updated VIF entry in instance network info cache for port d3537ab9-0a82-437a-83c1-ffb18a60490a. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1859.721784] env[62820]: DEBUG nova.network.neutron [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [{"id": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "address": "fa:16:3e:96:64:a6", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3537ab9-0a", "ovs_interfaceid": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.725397] env[62820]: DEBUG nova.compute.manager [req-e9b88f93-adfa-42ee-9a26-31931931ad33 req-8f448bda-3b8b-4115-a80b-d0fecfcab328 service nova] [instance: 
0dd0e112-7a7c-4b37-8938-bb98aab2d485] Received event network-vif-deleted-a4c265b9-9afd-44f1-b48d-b95d490dc950 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1859.737637] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: a150a0d8-afcc-4a5b-a014-2c25a9bc4f07] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1859.869916] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.883579] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696465, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.954081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.140624] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.143438] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.387s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.144942] env[62820]: INFO nova.compute.claims [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1860.166138] env[62820]: INFO nova.scheduler.client.report [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance 3bff732c-9d4f-4dfa-8058-42c4dbde2efe [ 1860.224070] env[62820]: DEBUG oslo_concurrency.lockutils [req-772c2e2d-ba6e-4d62-8ce4-2f8ed6e7fbc8 req-b91d8b90-9fc0-4c20-b904-fc5f4fb15f6d service nova] Releasing lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.229399] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9f3c02a9-2c82-47b1-89ce-9a922c010476 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.252657] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 210277a2-dd10-4e08-8627-4b025a554410] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1860.255450] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1860.351913] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1860.369452] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.379816] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1860.380056] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1860.380241] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1860.380406] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1860.380550] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1860.380694] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1860.380894] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1860.381069] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1860.381239] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1860.381402] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1860.381578] env[62820]: DEBUG nova.virt.hardware [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1860.382300] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbadffa-9dbd-4d4e-864e-404a2fd99f62 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.392148] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4422f39-28ba-4c29-b890-a866e97e0e1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.395531] env[62820]: DEBUG oslo_vmware.api [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696465, 'name': PowerOnVM_Task, 'duration_secs': 1.111851} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.395766] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1860.395962] env[62820]: INFO nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Took 10.84 seconds to spawn the instance on the hypervisor. [ 1860.396153] env[62820]: DEBUG nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1860.397106] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475f9f4f-64ff-4d23-8962-3f3fe9ae57fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.678415] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6aacfabc-1fb0-4ece-a6d3-8bc18c022bb0 tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "3bff732c-9d4f-4dfa-8058-42c4dbde2efe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.197s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.762019] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: a8486f52-998d-4308-813a-9c651e2eb093] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1860.763398] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1860.764291] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28e9fd6e-dacc-4185-b6fb-1394c2b37e58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.773113] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1860.773113] env[62820]: value = "task-1696467" [ 1860.773113] env[62820]: _type = "Task" [ 1860.773113] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.787799] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696467, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.881417] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696466, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.923919] env[62820]: INFO nova.compute.manager [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Took 15.76 seconds to build instance. [ 1861.000806] env[62820]: DEBUG nova.network.neutron [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Successfully updated port: 02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1861.264919] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7a755ef6-67bc-4242-9343-c54c8566adf8] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1861.289431] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696467, 'name': PowerOffVM_Task, 'duration_secs': 0.327443} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.290362] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1861.290362] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1861.334087] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99e59a5-9743-41ac-8765-26128fbff86b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.342087] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc616da1-9cca-41ee-951d-ae3d3056c145 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.375522] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d788b4-2564-41ee-8e79-629b76c7a3ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.383701] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b 
tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696466, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.842748} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.385673] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/c15bbb69-84a0-4fda-a509-66218b9c9f70.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1861.385898] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1861.386183] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af342e8e-d711-4cee-84f2-082d5a915fb0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.388754] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9714049-666f-4679-abbe-51490ca866bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.401882] env[62820]: DEBUG nova.compute.provider_tree [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.404307] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1861.404307] env[62820]: value = "task-1696468" [ 1861.404307] env[62820]: _type = "Task" [ 1861.404307] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.412980] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696468, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.421662] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.426352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-faf7cec7-c572-4a1c-89a9-ce2ef052fa2b tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.274s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.426617] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.005s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.426827] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.427036] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.427208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.429172] env[62820]: INFO nova.compute.manager [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Terminating instance [ 1861.503635] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock 
"refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.503797] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.503929] env[62820]: DEBUG nova.network.neutron [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1861.749872] env[62820]: DEBUG nova.compute.manager [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Received event network-vif-plugged-02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1861.750131] env[62820]: DEBUG oslo_concurrency.lockutils [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] Acquiring lock "41666e62-526d-4553-a005-07cbc2321d0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.750355] env[62820]: DEBUG oslo_concurrency.lockutils [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] Lock "41666e62-526d-4553-a005-07cbc2321d0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.750533] env[62820]: DEBUG oslo_concurrency.lockutils [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] Lock "41666e62-526d-4553-a005-07cbc2321d0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.750694] env[62820]: DEBUG nova.compute.manager [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] No waiting events found dispatching network-vif-plugged-02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1861.750864] env[62820]: WARNING nova.compute.manager [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Received unexpected event network-vif-plugged-02ad8941-576b-4634-8cba-ffa38ff466c5 for instance with vm_state building and task_state spawning. 
[ 1861.751034] env[62820]: DEBUG nova.compute.manager [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Received event network-changed-02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1861.751262] env[62820]: DEBUG nova.compute.manager [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Refreshing instance network info cache due to event network-changed-02ad8941-576b-4634-8cba-ffa38ff466c5. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1861.751443] env[62820]: DEBUG oslo_concurrency.lockutils [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] Acquiring lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.770942] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 871195a8-8b7d-433f-a0b5-c570c65faf1e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1861.796107] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1861.796201] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1861.796352] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1861.796535] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1861.796684] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1861.796835] env[62820]: DEBUG nova.virt.hardware 
[None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1861.797051] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1861.797215] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1861.797396] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1861.797564] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1861.797735] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1861.803404] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b696fea-56bc-4672-8788-a1dac30f78fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.819876] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1861.819876] env[62820]: value = "task-1696469" [ 1861.819876] env[62820]: _type = "Task" [ 1861.819876] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.828195] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696469, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.882542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "2fe561a2-57ad-4385-830e-61cd274c7123" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.882887] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.906795] env[62820]: DEBUG nova.scheduler.client.report [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1861.924287] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696468, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083379} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.924773] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1861.925564] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1439c870-0f32-4262-9627-7d395acbf661 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.942187] env[62820]: DEBUG nova.compute.manager [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1861.942289] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1861.951535] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/c15bbb69-84a0-4fda-a509-66218b9c9f70.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1861.954277] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffdae4c-71bc-427d-b014-c06e17f00766 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.957093] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef3f6b93-67b3-4966-92a6-920896fb88fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.977825] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1861.979075] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-346fa4cb-2f8e-4d96-966f-d0808b9d3cda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.980755] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1861.980755] env[62820]: value = "task-1696470" [ 1861.980755] env[62820]: _type = "Task" [ 1861.980755] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.985540] env[62820]: DEBUG oslo_vmware.api [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1861.985540] env[62820]: value = "task-1696471" [ 1861.985540] env[62820]: _type = "Task" [ 1861.985540] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.991882] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696470, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.996524] env[62820]: DEBUG oslo_vmware.api [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.035252] env[62820]: DEBUG nova.network.neutron [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1862.172883] env[62820]: DEBUG nova.network.neutron [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.274766] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 492db939-78f4-4642-89dd-a01fa94f41b5] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1862.330448] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696469, 'name': ReconfigVM_Task, 'duration_secs': 0.470221} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.330801] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1862.385228] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1862.418947] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.420048] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1862.424542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.470s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.424542] env[62820]: DEBUG nova.objects.instance [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'resources' on Instance uuid 0dd0e112-7a7c-4b37-8938-bb98aab2d485 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1862.493441] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696470, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.498193] env[62820]: DEBUG oslo_vmware.api [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696471, 'name': PowerOffVM_Task, 'duration_secs': 0.449294} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.498454] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1862.498623] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1862.498857] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfbe0c44-27fb-4425-8499-e3b247d817c0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.642768] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1862.642992] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1862.643195] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Deleting the datastore file [datastore1] d519c4e7-0d47-4643-8c31-acb2f6ee38b2 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1862.643456] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b74653e1-96f6-483b-96cb-e2877f2f612a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.650085] env[62820]: DEBUG oslo_vmware.api [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for the task: (returnval){ [ 1862.650085] env[62820]: value = "task-1696473" [ 1862.650085] env[62820]: _type = "Task" [ 1862.650085] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.657413] env[62820]: DEBUG oslo_vmware.api [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696473, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.676015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.676349] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Instance network_info: |[{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1862.676661] env[62820]: DEBUG oslo_concurrency.lockutils [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.676841] env[62820]: DEBUG nova.network.neutron [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Refreshing network info cache for port 02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.678079] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:00:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02ad8941-576b-4634-8cba-ffa38ff466c5', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1862.685345] env[62820]: DEBUG oslo.service.loopingcall [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 
tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1862.688279] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1862.688753] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0268042-b096-4c15-8e2a-bee272c68b65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.709770] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1862.709770] env[62820]: value = "task-1696474" [ 1862.709770] env[62820]: _type = "Task" [ 1862.709770] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.719165] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.778423] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 207efed9-20ea-4b9e-bca2-45521b41de6a] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1862.837405] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1862.837675] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1862.837833] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1862.838028] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1862.838186] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 
tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1862.838346] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1862.838552] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1862.838711] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1862.838930] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1862.839203] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1862.839510] env[62820]: DEBUG nova.virt.hardware [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1862.844939] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1862.848067] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1283054-81d0-43a4-a2e4-0c10eb828fa1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.868044] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1862.868044] env[62820]: value = "task-1696475" [ 1862.868044] env[62820]: _type = "Task" [ 1862.868044] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.876484] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696475, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.909411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.927556] env[62820]: DEBUG nova.compute.utils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1862.932876] env[62820]: DEBUG nova.network.neutron [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updated VIF entry in instance network info cache for port 02ad8941-576b-4634-8cba-ffa38ff466c5. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.933549] env[62820]: DEBUG nova.network.neutron [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.934609] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1862.935432] env[62820]: DEBUG nova.network.neutron [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1862.995126] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696470, 'name': ReconfigVM_Task, 'duration_secs': 0.624753} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.995416] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Reconfigured VM instance instance-00000071 to attach disk [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/c15bbb69-84a0-4fda-a509-66218b9c9f70.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1862.996270] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8962319-edab-4c19-9574-9e1132636fd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.003079] env[62820]: DEBUG nova.policy [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe4b58f7f5bd405db5c7f8b630032aa1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'accd5c1cf55248b780b00e33faf79fa0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1863.008308] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1863.008308] env[62820]: value = "task-1696476" [ 1863.008308] env[62820]: _type = "Task" [ 1863.008308] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.020991] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696476, 'name': Rename_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.142136] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a79dc2d-5a40-4a89-925f-a50f5c26ebf5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.155509] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83673c1b-6816-4721-99bf-cd2658b351f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.164677] env[62820]: DEBUG oslo_vmware.api [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Task: {'id': task-1696473, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.509808} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.189108] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1863.189349] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1863.189535] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1863.189718] env[62820]: INFO nova.compute.manager [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1863.189969] env[62820]: DEBUG oslo.service.loopingcall [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1863.190422] env[62820]: DEBUG nova.compute.manager [-] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1863.190521] env[62820]: DEBUG nova.network.neutron [-] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1863.193035] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cda6e44-cf94-4b4c-b628-cc6d7ce5bdce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.200503] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d917fd0f-069f-4175-a554-a1b110e94232 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.214201] env[62820]: DEBUG nova.compute.provider_tree [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1863.223416] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.282169] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7e4596bf-a8b0-4502-b80b-da372d1fba06] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1863.381330] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696475, 'name': ReconfigVM_Task, 'duration_secs': 0.227776} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.381940] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1863.383209] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d40b88-08cf-4a81-9017-74715d891085 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.413370] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1863.413860] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-969d4ad3-5d79-4bbc-87da-fd5b5352ab0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.435156] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1863.436811] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1863.436811] env[62820]: value = "task-1696477" [ 1863.436811] env[62820]: _type = "Task" [ 1863.436811] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.437647] env[62820]: DEBUG oslo_concurrency.lockutils [req-f60c958b-511b-4914-8c18-deb6dc731962 req-ca83289c-f249-4b20-8caa-aed1fbdad50b service nova] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.443289] env[62820]: DEBUG nova.network.neutron [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Successfully created port: 023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1863.449189] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696477, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.519859] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696476, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.720024] env[62820]: DEBUG nova.scheduler.client.report [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1863.726407] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.777878] env[62820]: DEBUG nova.compute.manager [req-7bc3c6e4-d86f-460c-b7ec-7de255a6cf32 req-65433005-5612-408d-a4bb-3229fe15bed2 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Received event network-vif-deleted-26b4189b-0a18-4e7c-b07b-c63278a422e5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1863.778101] env[62820]: INFO nova.compute.manager [req-7bc3c6e4-d86f-460c-b7ec-7de255a6cf32 req-65433005-5612-408d-a4bb-3229fe15bed2 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Neutron deleted interface 26b4189b-0a18-4e7c-b07b-c63278a422e5; detaching it from the instance and deleting it from the info cache [ 1863.778289] env[62820]: DEBUG nova.network.neutron [req-7bc3c6e4-d86f-460c-b7ec-7de255a6cf32 req-65433005-5612-408d-a4bb-3229fe15bed2 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.785887] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 9114a81d-86a9-493b-9c07-c4724a0588ac] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1863.951988] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696477, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.019470] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696476, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.179621] env[62820]: DEBUG nova.network.neutron [-] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.224933] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.228067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.804s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.230823] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.321s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.231769] env[62820]: INFO nova.compute.claims [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1864.254614] env[62820]: INFO nova.scheduler.client.report [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleted allocations for instance 0dd0e112-7a7c-4b37-8938-bb98aab2d485 [ 1864.280963] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06bf013e-310c-44a9-9acd-a560d10d9ff8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.290966] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817fa3e4-6890-4705-8e36-fb7eba4e2150 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.302493] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b7c9f518-c908-42cc-ba09-59b0f8431f68] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1864.325726] env[62820]: DEBUG nova.compute.manager [req-7bc3c6e4-d86f-460c-b7ec-7de255a6cf32 req-65433005-5612-408d-a4bb-3229fe15bed2 service nova] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Detach interface failed, port_id=26b4189b-0a18-4e7c-b07b-c63278a422e5, reason: Instance d519c4e7-0d47-4643-8c31-acb2f6ee38b2 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1864.447577] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1864.455634] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696477, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.474775] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1864.475071] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1864.475202] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1864.475387] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1864.475532] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1864.475677] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} 
[ 1864.475880] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1864.476048] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1864.476220] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1864.476384] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1864.476557] env[62820]: DEBUG nova.virt.hardware [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1864.477427] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b7a62b-87d0-4161-b360-0315a970ea47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.485494] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106ec176-b0fb-4437-8116-755b5d526d58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.519491] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696476, 'name': Rename_Task, 'duration_secs': 1.255665} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.519769] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1864.520010] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b8a1396-f0ce-4278-aca1-0956c0afef71 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.525313] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1864.525313] env[62820]: value = "task-1696478" [ 1864.525313] env[62820]: _type = "Task" [ 1864.525313] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.532243] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696478, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.683662] env[62820]: INFO nova.compute.manager [-] [instance: d519c4e7-0d47-4643-8c31-acb2f6ee38b2] Took 1.49 seconds to deallocate network for instance. [ 1864.725646] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.761708] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ba46340f-61ac-452f-802c-12974a72d79d tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "0dd0e112-7a7c-4b37-8938-bb98aab2d485" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.401s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.806470] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: b6c58867-914e-4e6e-8092-fc8991dc87f7] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1864.953472] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696477, 'name': ReconfigVM_Task, 'duration_secs': 1.084636} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.953799] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to attach disk [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1864.954165] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1865.038553] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696478, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.050483] env[62820]: DEBUG nova.network.neutron [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Successfully updated port: 023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1865.190951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.228886] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.309501] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: ab21fd61-3a44-42fa-92be-51214b0a9a1e] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1865.416768] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7151f44-ecb5-428f-bf60-5fe811321a47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.424895] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9f0efe-cd07-4a79-998c-78915bfbcac1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.455879] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10290e7f-c67f-44df-b742-82835abbff6e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.460279] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf7cb29-78c7-4e42-bc8d-d82dfe3a3b66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.482613] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433894b3-d4f8-4fb4-866e-d928758968ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.487006] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2297416b-f437-41dc-8fcf-44761b3a1901 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.499429] env[62820]: DEBUG nova.compute.provider_tree [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.516369] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1865.520756] env[62820]: DEBUG nova.scheduler.client.report [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 
1865.538821] env[62820]: DEBUG oslo_vmware.api [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696478, 'name': PowerOnVM_Task, 'duration_secs': 0.848239} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.539120] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1865.539381] env[62820]: INFO nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Took 11.81 seconds to spawn the instance on the hypervisor. [ 1865.539629] env[62820]: DEBUG nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1865.540975] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa61a6c-7b41-424e-8b6a-5e495caea0cc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.552434] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.552577] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.552722] env[62820]: DEBUG nova.network.neutron [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.726330] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.801280] env[62820]: DEBUG nova.compute.manager [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Received event network-vif-plugged-023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1865.801567] env[62820]: DEBUG oslo_concurrency.lockutils [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.801892] env[62820]: DEBUG oslo_concurrency.lockutils [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] Lock "4d69baaa-83da-4c5f-b88f-928693505520-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.802108] env[62820]: DEBUG oslo_concurrency.lockutils [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] Lock "4d69baaa-83da-4c5f-b88f-928693505520-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.802256] env[62820]: DEBUG nova.compute.manager [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] No waiting events found dispatching network-vif-plugged-023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1865.802422] env[62820]: WARNING nova.compute.manager [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Received unexpected event network-vif-plugged-023e5277-4e70-4789-bcb4-9a410cba8ec4 for instance with vm_state building and task_state spawning. [ 1865.802579] env[62820]: DEBUG nova.compute.manager [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Received event network-changed-023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1865.802734] env[62820]: DEBUG nova.compute.manager [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Refreshing instance network info cache due to event network-changed-023e5277-4e70-4789-bcb4-9a410cba8ec4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1865.802897] env[62820]: DEBUG oslo_concurrency.lockutils [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] Acquiring lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.814256] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: aa98dbb0-5ff7-4da5-a365-2b55a8bd2216] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1865.845572] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.845815] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.846025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.846213] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.846385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.848269] env[62820]: INFO nova.compute.manager [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Terminating instance [ 1866.028067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.798s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.028584] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1866.031266] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.841s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.031494] env[62820]: DEBUG nova.objects.instance [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lazy-loading 'resources' on Instance uuid d519c4e7-0d47-4643-8c31-acb2f6ee38b2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1866.058342] env[62820]: INFO nova.compute.manager [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Took 19.44 seconds to build instance. [ 1866.099316] env[62820]: DEBUG nova.network.neutron [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1866.226022] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.245097] env[62820]: DEBUG nova.network.neutron [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updating instance_info_cache with network_info: [{"id": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "address": "fa:16:3e:d5:33:be", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap023e5277-4e", "ovs_interfaceid": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.317519] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 58a26c98-cbf9-491f-8d2c-20281c3d7771] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1866.352853] env[62820]: DEBUG nova.compute.manager [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1866.353130] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1866.354021] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f93c11f-4726-42fb-8239-4a8c1ee125a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.362165] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1866.362430] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0fc3aef-2608-4085-8654-c221e556060f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.377395] env[62820]: DEBUG oslo_vmware.api [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1866.377395] env[62820]: value = "task-1696479" [ 1866.377395] env[62820]: _type = "Task" [ 1866.377395] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.385630] env[62820]: DEBUG oslo_vmware.api [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.533906] env[62820]: DEBUG nova.compute.utils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1866.542274] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1866.542623] env[62820]: DEBUG nova.network.neutron [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1866.561085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fafc8a88-c169-499f-9b38-416ff39c201b tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.944s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.591160] env[62820]: DEBUG nova.policy [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba8e4dc4cd634bf293d02187fbc77b72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ca1b6f7bda3437eb67f5f765b5864a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1866.727196] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.748270] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.748657] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Instance network_info: |[{"id": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "address": "fa:16:3e:d5:33:be", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap023e5277-4e", "ovs_interfaceid": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1866.748985] env[62820]: DEBUG oslo_concurrency.lockutils [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] Acquired lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.749186] env[62820]: DEBUG nova.network.neutron [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Refreshing network info cache for port 023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1866.750447] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:33:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9a1e09ef-7c9c-45d9-9bf4-55b913524948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '023e5277-4e70-4789-bcb4-9a410cba8ec4', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1866.762426] env[62820]: DEBUG oslo.service.loopingcall [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.767427] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1866.768089] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e59cddf-96e7-4544-973b-4d726bf086ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.783941] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34427b80-2542-43a5-bc58-e2c1eb4fee47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.792514] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bde1eb-d3a5-4538-ad6f-264f2be751fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.795395] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1866.795395] env[62820]: value = "task-1696480" [ 1866.795395] env[62820]: _type = "Task" [ 1866.795395] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.825386] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: a06d736c-a704-46e8-a6f7-85d8be40804f] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1866.831092] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903f0e55-d1bf-4a7e-86b0-358b4bffe276 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.836170] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696480, 'name': CreateVM_Task} progress is 10%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.841210] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b4fe5c-14aa-4b12-bb4a-223df40509f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.857230] env[62820]: DEBUG nova.compute.provider_tree [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1866.888057] env[62820]: DEBUG oslo_vmware.api [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696479, 'name': PowerOffVM_Task, 'duration_secs': 0.279645} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.888357] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1866.888535] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1866.888789] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b703c75b-51d2-463a-8a64-ccdc29097414 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.927958] env[62820]: INFO nova.compute.manager [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Rescuing [ 1866.928227] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.928371] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.928549] env[62820]: DEBUG nova.network.neutron [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1866.963844] env[62820]: DEBUG nova.network.neutron [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Successfully created port: 41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1867.046509] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1867.138199] env[62820]: DEBUG nova.network.neutron [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updated VIF entry in instance network info cache for port 023e5277-4e70-4789-bcb4-9a410cba8ec4. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1867.138662] env[62820]: DEBUG nova.network.neutron [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updating instance_info_cache with network_info: [{"id": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "address": "fa:16:3e:d5:33:be", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap023e5277-4e", "ovs_interfaceid": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.219668] env[62820]: DEBUG nova.network.neutron [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Port f7027439-2429-4746-8bc9-a95ce975c96a binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1867.230661] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696474, 'name': CreateVM_Task, 'duration_secs': 4.081842} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.231983] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1867.231983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.231983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.231983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1867.232314] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7affaa48-043d-468b-bcd9-515838be69ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.238060] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1867.238060] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5250d20d-5603-9a66-178c-b9ba4eb6eb09" [ 1867.238060] env[62820]: _type = "Task" [ 1867.238060] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.246428] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5250d20d-5603-9a66-178c-b9ba4eb6eb09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.306378] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696480, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.337394] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 0ed6ab62-6ae1-4b1a-be2e-a2312334fd86] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1867.359624] env[62820]: DEBUG nova.scheduler.client.report [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1867.558184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.558752] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.625864] env[62820]: DEBUG nova.network.neutron [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Updating instance_info_cache with network_info: [{"id": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "address": "fa:16:3e:70:13:d8", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad74c59c-92", "ovs_interfaceid": "ad74c59c-92d9-43b7-8a73-b480a40ae561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.640926] env[62820]: DEBUG oslo_concurrency.lockutils [req-75efba4f-85c8-408c-abde-a7cc9c718e73 req-5b88a18a-f0c7-421d-982d-f901a7ed3c19 service nova] Releasing lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.752039] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5250d20d-5603-9a66-178c-b9ba4eb6eb09, 'name': SearchDatastore_Task, 'duration_secs': 0.018611} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.752499] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.752868] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1867.753263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.753512] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.754032] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1867.754193] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7960f79b-222e-4ff6-97a8-642d3b9567db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.764148] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1867.764336] env[62820]: DEBUG 
nova.virt.vmwareapi.vmops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1867.765072] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29449269-9f57-40e2-9121-478b8106af58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.771090] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1867.771090] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5243b5ab-7660-b624-5fd1-17cc4f4c1333" [ 1867.771090] env[62820]: _type = "Task" [ 1867.771090] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.780784] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5243b5ab-7660-b624-5fd1-17cc4f4c1333, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.805093] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696480, 'name': CreateVM_Task, 'duration_secs': 0.540632} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.805261] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1867.806254] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.806428] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.806752] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1867.807009] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fcc7b2a-3892-4df1-9120-5aad67690d5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1867.811210] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1867.811210] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d50ad5-1168-ff7d-3556-99e98f812ca2" [ 1867.811210] env[62820]: _type = "Task" [ 1867.811210] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.818836] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d50ad5-1168-ff7d-3556-99e98f812ca2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.839999] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 069f58d6-f6bc-4ded-8274-6fed7c2f45b3] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1867.864897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.834s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.893377] env[62820]: INFO nova.scheduler.client.report [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Deleted allocations for instance d519c4e7-0d47-4643-8c31-acb2f6ee38b2 [ 1868.060129] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1868.062095] env[62820]: DEBUG nova.compute.utils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1868.086115] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1868.086398] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1868.086532] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1868.086745] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1868.086979] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1868.087278] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1868.087543] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1868.087670] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1868.087864] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1868.088047] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1868.088227] env[62820]: DEBUG nova.virt.hardware [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1868.089121] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2cc9f8-44b5-490e-936c-e3310e928214 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.098198] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4414d84b-0707-4dbb-bc59-41469400d726 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.102804] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1868.103015] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1868.103226] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleting the datastore file [datastore1] eafe98b7-a67d-4bab-bfc0-8367ae069d31 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1868.103755] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0264ed9b-a9c1-4f76-a40e-f9b8754e65f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.117267] env[62820]: DEBUG oslo_vmware.api [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for the task: (returnval){ [ 1868.117267] env[62820]: value = "task-1696482" [ 1868.117267] env[62820]: _type = "Task" [ 1868.117267] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.124445] env[62820]: DEBUG oslo_vmware.api [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.129015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "refresh_cache-c15bbb69-84a0-4fda-a509-66218b9c9f70" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.239828] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.240207] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.240376] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.282211] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5243b5ab-7660-b624-5fd1-17cc4f4c1333, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.283085] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d20f8ad5-e927-4bd0-a796-3799b4c5a91e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.288656] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1868.288656] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cbb210-4d5e-5958-41c1-8a4f73da299a" [ 1868.288656] env[62820]: _type = "Task" [ 1868.288656] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.296079] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cbb210-4d5e-5958-41c1-8a4f73da299a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.321810] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d50ad5-1168-ff7d-3556-99e98f812ca2, 'name': SearchDatastore_Task, 'duration_secs': 0.019354} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.322114] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.322348] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1868.322556] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.343100] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 4fa6e38f-dcca-4f65-86d6-1c585deb1c13] Instance has had 0 of 5 cleanup attempts {{(pid=62820) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11599}} [ 1868.400856] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10badb2c-7794-4078-b0c9-3cfe15587e41 tempest-ServerAddressesNegativeTestJSON-1369939204 tempest-ServerAddressesNegativeTestJSON-1369939204-project-member] Lock "d519c4e7-0d47-4643-8c31-acb2f6ee38b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.974s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.451181] env[62820]: DEBUG nova.compute.manager [req-8da7a4d8-80ab-4c1f-b66f-69dddbd30010 req-29d9ef3a-cdd5-4373-80c1-34e28ab2a267 service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Received event network-vif-plugged-41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1868.451457] env[62820]: DEBUG oslo_concurrency.lockutils [req-8da7a4d8-80ab-4c1f-b66f-69dddbd30010 req-29d9ef3a-cdd5-4373-80c1-34e28ab2a267 service 
nova] Acquiring lock "2fe561a2-57ad-4385-830e-61cd274c7123-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.451611] env[62820]: DEBUG oslo_concurrency.lockutils [req-8da7a4d8-80ab-4c1f-b66f-69dddbd30010 req-29d9ef3a-cdd5-4373-80c1-34e28ab2a267 service nova] Lock "2fe561a2-57ad-4385-830e-61cd274c7123-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.451774] env[62820]: DEBUG oslo_concurrency.lockutils [req-8da7a4d8-80ab-4c1f-b66f-69dddbd30010 req-29d9ef3a-cdd5-4373-80c1-34e28ab2a267 service nova] Lock "2fe561a2-57ad-4385-830e-61cd274c7123-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.451936] env[62820]: DEBUG nova.compute.manager [req-8da7a4d8-80ab-4c1f-b66f-69dddbd30010 req-29d9ef3a-cdd5-4373-80c1-34e28ab2a267 service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] No waiting events found dispatching network-vif-plugged-41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1868.452285] env[62820]: WARNING nova.compute.manager [req-8da7a4d8-80ab-4c1f-b66f-69dddbd30010 req-29d9ef3a-cdd5-4373-80c1-34e28ab2a267 service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Received unexpected event network-vif-plugged-41529505-0e12-447f-ab2d-6cc5935c3a5c for instance with vm_state building and task_state spawning. [ 1868.539476] env[62820]: DEBUG nova.network.neutron [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Successfully updated port: 41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1868.564277] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.627321] env[62820]: DEBUG oslo_vmware.api [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Task: {'id': task-1696482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177532} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.627575] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.627759] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1868.627940] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1868.628128] env[62820]: INFO nova.compute.manager [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1868.628364] env[62820]: DEBUG oslo.service.loopingcall [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.628556] env[62820]: DEBUG nova.compute.manager [-] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1868.628651] env[62820]: DEBUG nova.network.neutron [-] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1868.798940] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cbb210-4d5e-5958-41c1-8a4f73da299a, 'name': SearchDatastore_Task, 'duration_secs': 0.007812} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.799237] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.799597] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1868.799843] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.800037] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1868.800285] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68a19a90-0b24-445e-8998-b164e7b28fe1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.802894] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd98e9aa-0d5c-4fed-a102-1f23ad40335a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.809016] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1868.809016] env[62820]: value = "task-1696483" [ 1868.809016] env[62820]: _type = "Task" [ 1868.809016] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.812760] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1868.812935] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1868.813937] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28639d55-4fc6-4937-a29c-4449a9327824 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.819081] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696483, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.822015] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1868.822015] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5233470d-3125-4acb-6662-14106ad89638" [ 1868.822015] env[62820]: _type = "Task" [ 1868.822015] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.829454] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5233470d-3125-4acb-6662-14106ad89638, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.044665] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "refresh_cache-2fe561a2-57ad-4385-830e-61cd274c7123" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.044665] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "refresh_cache-2fe561a2-57ad-4385-830e-61cd274c7123" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.044665] env[62820]: DEBUG nova.network.neutron [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.155778] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1869.156085] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e75b9c5-be31-458e-93b8-b3ce1a1effbe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.165688] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 
tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1869.165688] env[62820]: value = "task-1696484" [ 1869.165688] env[62820]: _type = "Task" [ 1869.165688] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.182139] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696484, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.322634] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696483, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.330093] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.330237] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.330410] env[62820]: DEBUG nova.network.neutron [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.337375] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5233470d-3125-4acb-6662-14106ad89638, 'name': SearchDatastore_Task, 'duration_secs': 0.007951} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.338227] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44b7043d-9197-4acd-81ea-b72a8d50d346 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.344090] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1869.344090] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52202cf0-8328-786d-8e84-844a2eb71a33" [ 1869.344090] env[62820]: _type = "Task" [ 1869.344090] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.354714] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52202cf0-8328-786d-8e84-844a2eb71a33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.358094] env[62820]: DEBUG nova.network.neutron [-] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.590653] env[62820]: DEBUG nova.network.neutron [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1869.650295] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.650774] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.651103] env[62820]: INFO nova.compute.manager [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Attaching volume ce8fe0f5-4703-4d35-897a-774c6b74f0d6 to /dev/sdb [ 1869.675743] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696484, 'name': PowerOffVM_Task, 'duration_secs': 0.276628} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.676900] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1869.677125] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8614b04-731e-41f0-abf7-1bfa67d16025 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.698508] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd5ec5a-1e73-446a-a90d-12f839b27b58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.701900] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5350d917-299e-4aee-8340-1c245644c6a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.710933] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f1be66-5516-45d2-900a-bcecd9ca4a0a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.726801] env[62820]: DEBUG nova.virt.block_device [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating existing volume attachment record: 307bf658-3ed9-4d90-ae04-0a9146f211d0 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1869.748977] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1869.749348] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19f78abf-286a-4807-9f0e-5efcd907904d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.756359] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1869.756359] env[62820]: value = "task-1696485" [ 1869.756359] env[62820]: _type = "Task" [ 1869.756359] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.764541] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696485, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.819935] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560803} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.820219] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1869.820539] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1869.820808] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9bc5d9f-2f9e-4dae-9db7-7af7f9752a41 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.832030] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1869.832030] env[62820]: value = "task-1696486" [ 1869.832030] env[62820]: _type = "Task" [ 1869.832030] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.837631] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.854789] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52202cf0-8328-786d-8e84-844a2eb71a33, 'name': SearchDatastore_Task, 'duration_secs': 0.012487} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.855111] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.855873] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4d69baaa-83da-4c5f-b88f-928693505520/4d69baaa-83da-4c5f-b88f-928693505520.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1869.855873] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26dd334d-66f8-4dc2-bf39-bf8b12c5f441 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.860433] env[62820]: INFO nova.compute.manager [-] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Took 1.23 seconds to deallocate network for instance. [ 1869.863292] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1869.863292] env[62820]: value = "task-1696487" [ 1869.863292] env[62820]: _type = "Task" [ 1869.863292] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.874983] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696487, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.182393] env[62820]: DEBUG nova.network.neutron [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Updating instance_info_cache with network_info: [{"id": "41529505-0e12-447f-ab2d-6cc5935c3a5c", "address": "fa:16:3e:89:00:71", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41529505-0e", "ovs_interfaceid": "41529505-0e12-447f-ab2d-6cc5935c3a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.268783] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1870.269018] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1870.269319] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.269480] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.269685] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1870.269941] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f53184f7-86c6-4785-ab99-2abbc0acf949 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.290515] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1870.290831] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1870.291640] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20764128-83fb-4de4-8015-fada8682bba8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.298418] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1870.298418] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527b0d83-d4e8-131f-3967-f467e7ec74bf" [ 1870.298418] env[62820]: _type = "Task" [ 1870.298418] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.307323] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527b0d83-d4e8-131f-3967-f467e7ec74bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.339527] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082093} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.339859] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1870.341068] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474056bf-41b2-451a-9437-98f6efc3a24f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.364959] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1870.364959] env[62820]: DEBUG nova.network.neutron [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.366061] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-785be9bd-8636-40df-8aa0-61c9bdd67799 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.381723] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.382018] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.382261] env[62820]: DEBUG nova.objects.instance [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lazy-loading 'resources' on Instance uuid eafe98b7-a67d-4bab-bfc0-8367ae069d31 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1870.393391] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696487, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.395885] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1870.395885] env[62820]: value = "task-1696490" [ 1870.395885] env[62820]: _type = "Task" [ 1870.395885] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.406142] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696490, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.487223] env[62820]: DEBUG nova.compute.manager [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Received event network-changed-41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1870.487609] env[62820]: DEBUG nova.compute.manager [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Refreshing instance network info cache due to event network-changed-41529505-0e12-447f-ab2d-6cc5935c3a5c. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1870.488730] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] Acquiring lock "refresh_cache-2fe561a2-57ad-4385-830e-61cd274c7123" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.686046] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "refresh_cache-2fe561a2-57ad-4385-830e-61cd274c7123" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.686046] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Instance network_info: |[{"id": "41529505-0e12-447f-ab2d-6cc5935c3a5c", "address": "fa:16:3e:89:00:71", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41529505-0e", "ovs_interfaceid": "41529505-0e12-447f-ab2d-6cc5935c3a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1870.686356] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] Acquired lock "refresh_cache-2fe561a2-57ad-4385-830e-61cd274c7123" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.686541] env[62820]: DEBUG nova.network.neutron [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Refreshing network info cache for port 41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1870.687858] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:00:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41529505-0e12-447f-ab2d-6cc5935c3a5c', 'vif_model': 'vmxnet3'}] 
{{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1870.695898] env[62820]: DEBUG oslo.service.loopingcall [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1870.696565] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1870.696817] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-592daf31-7839-455f-9d52-3201182c582a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.720529] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1870.720529] env[62820]: value = "task-1696492" [ 1870.720529] env[62820]: _type = "Task" [ 1870.720529] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.732057] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696492, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.812280] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527b0d83-d4e8-131f-3967-f467e7ec74bf, 'name': SearchDatastore_Task, 'duration_secs': 0.030809} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.813190] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e08df408-3ffd-4ec3-9062-c51ee828634f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.819288] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1870.819288] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5263b156-b742-f7e4-5609-1ca383029715" [ 1870.819288] env[62820]: _type = "Task" [ 1870.819288] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.828438] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5263b156-b742-f7e4-5609-1ca383029715, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.882348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.902776] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696487, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.953919} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.906551] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 4d69baaa-83da-4c5f-b88f-928693505520/4d69baaa-83da-4c5f-b88f-928693505520.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1870.906744] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1870.908502] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1104174-9742-48ca-9b48-24ef62c82441 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.920269] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.922539] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1870.922539] env[62820]: value = "task-1696493" [ 1870.922539] env[62820]: _type = "Task" [ 1870.922539] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.933993] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696493, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.095308] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09505a45-ba26-436b-97ce-8f2d060aaf68 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.103562] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa32fe23-4116-43f6-b0e1-dff8e8b46584 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.138343] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324d89e6-6786-4b44-bc01-202115280d0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.146689] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c35912-551a-4f9c-9b15-9136e4009b60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.162784] env[62820]: DEBUG nova.compute.provider_tree [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1871.231325] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696492, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.332532] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5263b156-b742-f7e4-5609-1ca383029715, 'name': SearchDatastore_Task, 'duration_secs': 0.020489} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.332809] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.333087] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
{{(pid=62820) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1871.333364] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e40352e-eb90-49e9-88b9-14633e3afcd6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.340990] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1871.340990] env[62820]: value = "task-1696494" [ 1871.340990] env[62820]: _type = "Task" [ 1871.340990] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.348605] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696494, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.400476] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e44a31-d72d-4209-8de0-8039cddbd7a7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.413362] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696490, 'name': ReconfigVM_Task, 'duration_secs': 0.768778} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.414109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa04fd9e-f55c-4cbb-ae1b-4ba13aedfe19 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.416682] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1871.417336] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-638f5231-92eb-42d5-b484-170eaaacf922 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.424097] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1871.424097] env[62820]: value = "task-1696495" [ 1871.424097] env[62820]: _type = "Task" [ 1871.424097] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.445836] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143111} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.450315] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1871.450761] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696495, 'name': Rename_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.451573] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905d749f-0754-4026-8b16-8d00c781a101 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.476740] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 4d69baaa-83da-4c5f-b88f-928693505520/4d69baaa-83da-4c5f-b88f-928693505520.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1871.477105] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f069897-c66e-4514-806d-926d575a2272 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.499865] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1871.499865] env[62820]: value = "task-1696496" [ 1871.499865] env[62820]: _type = "Task" [ 1871.499865] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.507868] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696496, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.533387] env[62820]: DEBUG nova.network.neutron [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Updated VIF entry in instance network info cache for port 41529505-0e12-447f-ab2d-6cc5935c3a5c. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1871.533805] env[62820]: DEBUG nova.network.neutron [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Updating instance_info_cache with network_info: [{"id": "41529505-0e12-447f-ab2d-6cc5935c3a5c", "address": "fa:16:3e:89:00:71", "network": {"id": "f07df699-b39c-4f84-afdd-68667fbf1478", "bridge": "br-int", "label": "tempest-ServersTestJSON-766131652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ca1b6f7bda3437eb67f5f765b5864a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41529505-0e", "ovs_interfaceid": "41529505-0e12-447f-ab2d-6cc5935c3a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.668806] env[62820]: DEBUG nova.scheduler.client.report [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1871.734926] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696492, 'name': CreateVM_Task, 'duration_secs': 0.791349} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.735238] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1871.736110] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.736394] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.736831] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1871.737116] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aff47114-1743-4624-ba0d-b400e0f5d31e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.744151] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1871.744151] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]529c1e9c-0202-cbdc-af03-aefc0360fb21" [ 1871.744151] env[62820]: _type = "Task" [ 1871.744151] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.758762] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529c1e9c-0202-cbdc-af03-aefc0360fb21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.852737] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696494, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.934797] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696495, 'name': Rename_Task, 'duration_secs': 0.250738} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.936161] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1871.936161] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52f2d9c1-839c-44e3-8e1b-137d8b6c5c24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.942376] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1871.942376] env[62820]: value = "task-1696497" [ 1871.942376] env[62820]: _type = "Task" [ 1871.942376] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.950238] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696497, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.011101] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696496, 'name': ReconfigVM_Task, 'duration_secs': 0.475156} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.011418] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 4d69baaa-83da-4c5f-b88f-928693505520/4d69baaa-83da-4c5f-b88f-928693505520.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1872.012092] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f56112b-ab91-4d46-8ab2-c9791280e2bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.019050] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1872.019050] env[62820]: value = "task-1696498" [ 1872.019050] env[62820]: _type = "Task" [ 1872.019050] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.027398] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696498, 'name': Rename_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.043323] env[62820]: DEBUG oslo_concurrency.lockutils [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] Releasing lock "refresh_cache-2fe561a2-57ad-4385-830e-61cd274c7123" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.043323] env[62820]: DEBUG nova.compute.manager [req-ea604659-7491-4065-bddb-748d2c5f051b req-76be7e38-c2a6-48ae-9d41-86dccad0bd9b service nova] [instance: eafe98b7-a67d-4bab-bfc0-8367ae069d31] Received event network-vif-deleted-8ba6813f-c30f-416d-b888-4a33a10698ef {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1872.176145] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.200381] env[62820]: INFO nova.scheduler.client.report [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Deleted allocations for instance eafe98b7-a67d-4bab-bfc0-8367ae069d31 [ 1872.255537] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]529c1e9c-0202-cbdc-af03-aefc0360fb21, 'name': SearchDatastore_Task, 'duration_secs': 0.050601} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.255841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.256108] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1872.256349] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.256497] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.256677] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1872.257624] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04b86080-5d4f-498d-8374-702b52c8d63c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.265857] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1872.265986] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1872.266647] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdd9bc46-68f9-4ad3-af08-47f2d3505019 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.271963] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1872.271963] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521941d0-9cae-e618-2b32-64a04bacb6b9" [ 1872.271963] env[62820]: _type = "Task" [ 1872.271963] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.280008] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521941d0-9cae-e618-2b32-64a04bacb6b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.351062] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539003} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.351364] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
[ 1872.352179] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e776592-a26f-408d-b9fa-190b235d75cc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.378089] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1872.378455] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58b4bbed-1f4e-4fc7-95b6-d4c20d53ea82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.398053] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1872.398053] env[62820]: value = "task-1696499" [ 1872.398053] env[62820]: _type = "Task" [ 1872.398053] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.410264] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.453457] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696497, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.533057] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696498, 'name': Rename_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.560808] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7869978-e61f-475a-970f-873dc7f70202 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.584609] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083522e1-887f-4b41-82f0-d832b2ee0d66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.592345] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1872.711727] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c936394c-1322-414d-b868-4a4dea8234da tempest-ServerRescueNegativeTestJSON-730839076 tempest-ServerRescueNegativeTestJSON-730839076-project-member] Lock "eafe98b7-a67d-4bab-bfc0-8367ae069d31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.865s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.731768] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "44889009-b397-463f-be67-d67126d3fa5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.731920] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "44889009-b397-463f-be67-d67126d3fa5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.783429] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521941d0-9cae-e618-2b32-64a04bacb6b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009416} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.784215] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ab89e7c-9dd7-4c6b-aea8-6db40d565d42 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.789204] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1872.789204] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ff72a6-a646-6d26-8b10-b4987ebdd311" [ 1872.789204] env[62820]: _type = "Task" [ 1872.789204] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.797291] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ff72a6-a646-6d26-8b10-b4987ebdd311, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.908836] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696499, 'name': ReconfigVM_Task, 'duration_secs': 0.317098} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.909155] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Reconfigured VM instance instance-00000071 to attach disk [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1872.910185] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a920498-967f-4f3c-ac33-ddee091452f5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.937307] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d315b37b-b033-4f3a-a99b-c9a2938d6f17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.956789] env[62820]: DEBUG oslo_vmware.api [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696497, 'name': PowerOnVM_Task, 'duration_secs': 0.512857} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.958035] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1872.958266] env[62820]: INFO nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Took 12.61 seconds to spawn the instance on the hypervisor. [ 1872.958449] env[62820]: DEBUG nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1872.958770] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1872.958770] env[62820]: value = "task-1696501" [ 1872.958770] env[62820]: _type = "Task" [ 1872.958770] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.959498] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb975a6-09be-4acc-ba3c-487932e34d3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.973301] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.033588] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696498, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.100779] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1873.101098] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ee6e54d-609b-4d38-96f6-5e2a62a25e68 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.109177] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1873.109177] env[62820]: value = "task-1696502" [ 1873.109177] env[62820]: _type = "Task" [ 1873.109177] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.119128] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.235640] env[62820]: DEBUG nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1873.312018] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52ff72a6-a646-6d26-8b10-b4987ebdd311, 'name': SearchDatastore_Task, 'duration_secs': 0.014891} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.312018] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.312018] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2fe561a2-57ad-4385-830e-61cd274c7123/2fe561a2-57ad-4385-830e-61cd274c7123.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1873.317021] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a4288eb-a7b4-47e7-97af-189e40b10b59 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.321669] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1873.321669] env[62820]: value = "task-1696503" [ 1873.321669] env[62820]: _type = "Task" [ 1873.321669] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.329794] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696503, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.481143] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696501, 'name': ReconfigVM_Task, 'duration_secs': 0.166995} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.481444] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1873.481764] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f29325a3-dd61-4a1f-bca3-de4784ea05a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.491678] env[62820]: INFO nova.compute.manager [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Took 19.06 seconds to build instance. [ 1873.495061] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1873.495061] env[62820]: value = "task-1696504" [ 1873.495061] env[62820]: _type = "Task" [ 1873.495061] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.506999] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.534470] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696498, 'name': Rename_Task, 'duration_secs': 1.153436} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.534665] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1873.534867] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64ab7e9d-dd80-4067-8c8e-f8155dfd64d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.541675] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1873.541675] env[62820]: value = "task-1696505" [ 1873.541675] env[62820]: _type = "Task" [ 1873.541675] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.550592] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696505, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.621345] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696502, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.762233] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.762922] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.766174] env[62820]: INFO nova.compute.claims [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1873.834105] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696503, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.994925] env[62820]: DEBUG oslo_concurrency.lockutils [None req-260bab72-63ab-4a0c-8619-95a0f277ceb7 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.571s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.013353] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696504, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.053861] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696505, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.079647] env[62820]: DEBUG nova.compute.manager [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Received event network-changed-02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1874.079895] env[62820]: DEBUG nova.compute.manager [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Refreshing instance network info cache due to event network-changed-02ad8941-576b-4634-8cba-ffa38ff466c5. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1874.080239] env[62820]: DEBUG oslo_concurrency.lockutils [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] Acquiring lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.080295] env[62820]: DEBUG oslo_concurrency.lockutils [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.080552] env[62820]: DEBUG nova.network.neutron [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Refreshing network info cache for port 02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1874.121789] env[62820]: DEBUG oslo_vmware.api [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696502, 'name': PowerOnVM_Task, 'duration_secs': 0.825828} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.121789] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1874.121789] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ba7338-d042-4145-a180-3a2b3d538c1a tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance 'a8803178-7fa3-42ea-824c-901063673062' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1874.332709] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.757927} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.333136] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 2fe561a2-57ad-4385-830e-61cd274c7123/2fe561a2-57ad-4385-830e-61cd274c7123.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1874.333459] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1874.333767] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4809d591-827a-4a87-bc9e-ecccd05254f4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.340929] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1874.340929] env[62820]: value = "task-1696506" [ 1874.340929] env[62820]: _type = "Task" [ 1874.340929] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.348753] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696506, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.506514] env[62820]: DEBUG oslo_vmware.api [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696504, 'name': PowerOnVM_Task, 'duration_secs': 0.671382} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.506801] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1874.510028] env[62820]: DEBUG nova.compute.manager [None req-f2593b5f-e513-4f02-b0c6-4ce19d77cbb2 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1874.510377] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f364e25b-5815-4aa6-bd0b-63a63d21f1fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.556625] env[62820]: DEBUG oslo_vmware.api [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696505, 'name': PowerOnVM_Task, 'duration_secs': 0.56578} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.556888] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1874.557131] env[62820]: INFO nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Took 10.11 seconds to spawn the instance on the hypervisor. [ 1874.557830] env[62820]: DEBUG nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1874.558112] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3c5789-9a8e-4de2-b94b-0ab61a5c1c69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.786108] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1874.786416] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1874.787427] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6571c9-7b05-4da4-a905-c0289e3b4c88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.806646] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e785fd55-b02d-484e-903b-1d3ba6a7fd33 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.831588] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6/volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1874.832596] env[62820]: DEBUG nova.network.neutron [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updated VIF entry in instance network info cache for port 02ad8941-576b-4634-8cba-ffa38ff466c5. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1874.832944] env[62820]: DEBUG nova.network.neutron [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.836415] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb091bf0-8cf8-48c3-8c45-4b282c07cc54 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.860388] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696506, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.288439} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.863946] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1874.864320] env[62820]: DEBUG oslo_vmware.api [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1874.864320] env[62820]: value = "task-1696507" [ 1874.864320] env[62820]: _type = "Task" [ 1874.864320] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.865683] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfba93a-b369-4896-a2e1-0e43af239ae1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.901952] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 2fe561a2-57ad-4385-830e-61cd274c7123/2fe561a2-57ad-4385-830e-61cd274c7123.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1874.902156] env[62820]: DEBUG oslo_vmware.api [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.903287] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a780c1f-5d82-4f3c-9efd-320a849f2fe6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.928173] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1874.928173] env[62820]: value = "task-1696508" [ 1874.928173] env[62820]: _type = "Task" [ 1874.928173] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.938316] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696508, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.060152] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa9e84a-8a70-4c90-9ba9-8293bd8b821e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.073610] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0a8644-a32d-45ac-a91e-c6e43d77157c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.079486] env[62820]: INFO nova.compute.manager [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Took 17.36 seconds to build instance. 
[ 1875.108273] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e8f56a-385b-44ec-9c20-eacb24547705 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.117968] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93f35c3-c880-4b4d-b816-0081093d4137 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.135281] env[62820]: DEBUG nova.compute.provider_tree [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.351175] env[62820]: DEBUG oslo_concurrency.lockutils [req-d4340984-c264-47f1-9f1f-125b0b8a34b7 req-b33f2308-91f8-4ce2-8278-9e5907bc1229 service nova] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.378596] env[62820]: DEBUG oslo_vmware.api [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696507, 'name': ReconfigVM_Task, 'duration_secs': 0.448457} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.378897] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6/volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1875.385196] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-803783f2-6b4e-465a-b824-d59771685f2c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.398700] env[62820]: DEBUG oslo_vmware.api [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1875.398700] env[62820]: value = "task-1696509" [ 1875.398700] env[62820]: _type = "Task" [ 1875.398700] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.411808] env[62820]: DEBUG oslo_vmware.api [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696509, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.439014] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696508, 'name': ReconfigVM_Task, 'duration_secs': 0.376696} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.439780] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 2fe561a2-57ad-4385-830e-61cd274c7123/2fe561a2-57ad-4385-830e-61cd274c7123.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1875.441236] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fcb84ab5-c491-45e4-a6ba-3e5aef8e046b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.449532] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1875.449532] env[62820]: value = "task-1696510" [ 1875.449532] env[62820]: _type = "Task" [ 1875.449532] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.458552] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696510, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.479394] env[62820]: DEBUG nova.compute.manager [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Received event network-changed-023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1875.479679] env[62820]: DEBUG nova.compute.manager [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Refreshing instance network info cache due to event network-changed-023e5277-4e70-4789-bcb4-9a410cba8ec4. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1875.480058] env[62820]: DEBUG oslo_concurrency.lockutils [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] Acquiring lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.480232] env[62820]: DEBUG oslo_concurrency.lockutils [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] Acquired lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.480434] env[62820]: DEBUG nova.network.neutron [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Refreshing network info cache for port 023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1875.581610] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3dd8730c-f15f-4e10-a2e6-9b9bfc8e9b4c tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.364s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.639223] env[62820]: DEBUG nova.scheduler.client.report [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1875.909250] env[62820]: DEBUG oslo_vmware.api [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696509, 'name': ReconfigVM_Task, 'duration_secs': 0.168951} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.909487] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1875.963131] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696510, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.144868] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.146527] env[62820]: DEBUG nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1876.290869] env[62820]: DEBUG nova.network.neutron [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updated VIF entry in instance network info cache for port 023e5277-4e70-4789-bcb4-9a410cba8ec4. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1876.291256] env[62820]: DEBUG nova.network.neutron [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updating instance_info_cache with network_info: [{"id": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "address": "fa:16:3e:d5:33:be", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap023e5277-4e", "ovs_interfaceid": "023e5277-4e70-4789-bcb4-9a410cba8ec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.471979] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696510, 'name': Rename_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.652188] env[62820]: DEBUG nova.compute.utils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1876.653661] env[62820]: DEBUG nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Not allocating networking since 'none' was specified. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1876.793754] env[62820]: DEBUG oslo_concurrency.lockutils [req-439395a6-7844-4c61-9306-c08916a202b1 req-06a5ff55-bd1b-485a-a6ef-716a0299bb50 service nova] Releasing lock "refresh_cache-4d69baaa-83da-4c5f-b88f-928693505520" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.828693] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "70ef320e-16c4-4aa8-8770-4828f71868f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.828935] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.962596] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696510, 'name': Rename_Task, 'duration_secs': 1.324146} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.962916] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1876.964063] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cf958e5-fc95-44da-87e7-e828a6bd067a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.965644] env[62820]: DEBUG nova.objects.instance [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'flavor' on Instance uuid 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.971923] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1876.971923] env[62820]: value = "task-1696511" [ 1876.971923] env[62820]: _type = "Task" [ 1876.971923] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.980481] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696511, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.098907] env[62820]: DEBUG nova.network.neutron [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Port f7027439-2429-4746-8bc9-a95ce975c96a binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1877.099186] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.099363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.099561] env[62820]: DEBUG nova.network.neutron [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1877.155265] env[62820]: DEBUG nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1877.331806] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1877.344577] env[62820]: INFO nova.compute.manager [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Rebuilding instance [ 1877.425281] env[62820]: DEBUG nova.compute.manager [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1877.426283] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088a550c-021c-45be-baf0-6807e4164760 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.469976] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6a1a3b71-e4e1-4784-95b5-353c981edd56 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.819s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.485489] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696511, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.841961] env[62820]: DEBUG nova.network.neutron [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.850751] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f 
tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.850994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.852484] env[62820]: INFO nova.compute.claims [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1877.983154] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696511, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.164698] env[62820]: DEBUG nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1878.188394] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1878.188679] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1878.188885] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.189096] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 
tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1878.189252] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.189467] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1878.189688] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1878.189850] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1878.190127] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1878.190407] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1878.190700] env[62820]: DEBUG nova.virt.hardware [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1878.191795] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590ae022-8f90-4674-9a1b-2d497762c38d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.199946] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46237fb-a35b-415d-852a-3de4f6602a3d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.213327] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1878.218723] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Creating folder: Project (366c469c677d4a1da3b7c626ebb040f1). Parent ref: group-v353379. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1878.218979] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d86caa1e-9b75-4359-a9c1-1a98891b67ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.229391] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Created folder: Project (366c469c677d4a1da3b7c626ebb040f1) in parent group-v353379. [ 1878.229554] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Creating folder: Instances. Parent ref: group-v353690. {{(pid=62820) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1878.229766] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bfb0ce6-02ba-48ff-816c-ebae83b1abea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.238916] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Created folder: Instances in parent group-v353690. [ 1878.239214] env[62820]: DEBUG oslo.service.loopingcall [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.239460] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1878.239653] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5259e782-26c1-4f03-9631-88f2cb82ed2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.254899] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1878.254899] env[62820]: value = "task-1696514" [ 1878.254899] env[62820]: _type = "Task" [ 1878.254899] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.265262] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696514, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.344477] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.440997] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1878.441307] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79976619-683c-4445-b96b-d300da9cbb73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.449170] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1878.449170] env[62820]: value = "task-1696515" [ 1878.449170] env[62820]: _type = "Task" [ 1878.449170] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.457769] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696515, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.481247] env[62820]: DEBUG oslo_vmware.api [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696511, 'name': PowerOnVM_Task, 'duration_secs': 1.093287} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.481502] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1878.481703] env[62820]: INFO nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Took 10.42 seconds to spawn the instance on the hypervisor. 
[ 1878.481884] env[62820]: DEBUG nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1878.482629] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8f8d96-93f3-442e-a46d-eec22700e505 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.764674] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696514, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.848846] env[62820]: DEBUG nova.compute.manager [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62820) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1878.958621] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696515, 'name': PowerOffVM_Task, 'duration_secs': 0.29011} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.961092] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1879.001262] env[62820]: INFO nova.compute.manager [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Took 16.11 seconds to build instance. 
[ 1879.006567] env[62820]: INFO nova.compute.manager [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Detaching volume ce8fe0f5-4703-4d35-897a-774c6b74f0d6 [ 1879.025893] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b4b1ac-a0e4-4808-b978-c246a3c8c5b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.033929] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d547d954-e6c2-4ba0-aa6d-8e18ee02f688 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.040627] env[62820]: INFO nova.virt.block_device [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Attempting to driver detach volume ce8fe0f5-4703-4d35-897a-774c6b74f0d6 from mountpoint /dev/sdb [ 1879.040840] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1879.041025] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1879.041745] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2afed1-e1f1-4fb1-a959-e8df84d31323 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.082929] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16957e9-41c8-48a1-b243-f5c150108797 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.110547] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dbefdd-0dc9-418c-88e4-320772bddc4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.116883] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f69ed4-c29e-45b0-b9e8-1537c15ed215 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.123117] env[62820]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a77723a-958a-4f17-9853-920ae157f26d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.134467] env[62820]: DEBUG nova.compute.provider_tree [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.154732] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b4679f-9c3e-477d-bd47-a83fb1f0f743 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.170260] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] The volume has not been displaced from its original location: [datastore1] volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6/volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1879.175361] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1879.175671] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef770a37-8d50-4032-b35f-c5153b5e942d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.193840] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1879.193840] env[62820]: value = "task-1696516" [ 1879.193840] env[62820]: _type = "Task" [ 1879.193840] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.203987] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696516, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.265824] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696514, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.503129] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a39fd3e2-5b4b-4e49-9fa0-b40ec7e209de tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.620s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.639602] env[62820]: DEBUG nova.scheduler.client.report [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1879.704381] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696516, 'name': ReconfigVM_Task, 'duration_secs': 0.18462} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.704666] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1879.709520] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-946ddddb-8bfe-4586-98aa-e69123cd978e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.725739] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1879.725739] env[62820]: value = "task-1696517" [ 1879.725739] env[62820]: _type = "Task" [ 1879.725739] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.733872] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.766513] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696514, 'name': CreateVM_Task, 'duration_secs': 1.371523} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.766513] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1879.766867] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.767062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.767500] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1879.767856] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c365094-ec3a-4a8c-915c-32a0355da4e1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.773095] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1879.773095] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521bbdff-67ad-c1f5-2174-07adfb1172a1" [ 1879.773095] env[62820]: _type = "Task" [ 1879.773095] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.781043] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521bbdff-67ad-c1f5-2174-07adfb1172a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.946691] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.144774] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.145337] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1880.147866] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.201s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.236524] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696517, 'name': ReconfigVM_Task, 'duration_secs': 0.159028} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.236524] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1880.284078] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521bbdff-67ad-c1f5-2174-07adfb1172a1, 'name': SearchDatastore_Task, 'duration_secs': 0.00864} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.284393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.284634] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1880.284861] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.285015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.285202] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1880.285462] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93a5149d-8431-492c-ba63-b66a2f609a2a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.295944] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1880.296146] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1880.296893] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff29dfad-58da-4a9e-8177-a62adc82d6e2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.302264] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1880.302264] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52404099-8c46-ca01-7585-4951d66a2f0d" [ 1880.302264] env[62820]: _type = "Task" [ 1880.302264] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.310535] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52404099-8c46-ca01-7585-4951d66a2f0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.354735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "2fe561a2-57ad-4385-830e-61cd274c7123" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.355131] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.355331] env[62820]: DEBUG nova.compute.manager [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1880.356229] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c10c97-217f-4e2d-aec9-398c9f0511b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.363392] env[62820]: DEBUG nova.compute.manager [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1880.363979] env[62820]: DEBUG nova.objects.instance [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'flavor' on Instance uuid 2fe561a2-57ad-4385-830e-61cd274c7123 {{(pid=62820) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1880.651067] env[62820]: DEBUG nova.compute.utils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.652647] env[62820]: DEBUG nova.objects.instance [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'migration_context' on Instance uuid a8803178-7fa3-42ea-824c-901063673062 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1880.653764] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1880.653933] env[62820]: DEBUG nova.network.neutron [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1880.695566] env[62820]: DEBUG nova.policy [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a11a3b5fb67a49ceb7bceb2770021fcf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2ccee293cde400f927db43f421cd50d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1880.812684] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52404099-8c46-ca01-7585-4951d66a2f0d, 'name': SearchDatastore_Task, 'duration_secs': 0.009332} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.813514] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88a4741d-b6b9-40e5-8402-d33b79cd8661 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.818927] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1880.818927] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528ccc36-81f5-4e54-a30f-cfc28b518aab" [ 1880.818927] env[62820]: _type = "Task" [ 1880.818927] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.827456] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528ccc36-81f5-4e54-a30f-cfc28b518aab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.955782] env[62820]: DEBUG nova.network.neutron [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Successfully created port: fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1881.155310] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1881.333245] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528ccc36-81f5-4e54-a30f-cfc28b518aab, 'name': SearchDatastore_Task, 'duration_secs': 0.009435} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.333847] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.334139] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1881.334416] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-215955d8-8e2a-4a1c-9e3f-a496a643b94b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.341697] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1881.341697] env[62820]: value = "task-1696518" [ 1881.341697] env[62820]: _type = "Task" [ 1881.341697] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.347316] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00311097-cfb5-49c9-87ff-3a00b42709bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.351939] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1881.356584] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d430bee5-fa12-48b7-9ad4-2994d4cb6685 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.358223] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.359159] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab11c8ca-b9eb-4221-b9fb-374d2d30e9f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.393080] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1881.394908] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d01d1b3c-ab34-47bb-9e3d-a6746857af65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.397139] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32fa2ad-b764-4f46-80ca-fd3b5327ac22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.399724] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1881.399724] env[62820]: value = "task-1696519" [ 1881.399724] env[62820]: _type = "Task" [ 1881.399724] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.410821] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d08b5f3-e71b-46f9-8e35-57485c5cebe7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.414807] env[62820]: DEBUG oslo_vmware.api [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1881.414807] env[62820]: value = "task-1696520" [ 1881.414807] env[62820]: _type = "Task" [ 1881.414807] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.415254] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1881.415452] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1881.415641] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1881.419889] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa83ae2-e80b-434c-9efd-a6ac680f6b8b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.430135] env[62820]: DEBUG nova.compute.provider_tree [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1881.437134] env[62820]: DEBUG oslo_vmware.api [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696520, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.452051] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b4f0cf-b822-4749-9721-1d21fd4fc2e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.459303] env[62820]: WARNING nova.virt.vmwareapi.driver [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1881.459642] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1881.460528] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9be1948-7141-47f9-9423-cdbb9148cb7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.468351] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1881.468622] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f244f4bc-38bd-4422-8c8c-17ef357ceee8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.669080] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1881.669080] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1881.669080] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1881.669080] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94f88316-8d0e-4e1b-a589-e4c63f3aa0f3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.677570] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1881.677570] env[62820]: value = "task-1696522" [ 1881.677570] env[62820]: _type = "Task" [ 1881.677570] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.688742] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.853373] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696518, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.924666] env[62820]: DEBUG oslo_vmware.api [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696520, 'name': PowerOffVM_Task, 'duration_secs': 0.209302} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.924930] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1881.925145] env[62820]: DEBUG nova.compute.manager [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1881.925906] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4a4ea2-ef75-4941-a54f-9d424103f2ec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.935778] env[62820]: DEBUG nova.scheduler.client.report [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1882.173623] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1882.189711] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.202204] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1882.202468] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1882.202625] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1882.202810] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1882.202958] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1882.203367] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1882.203367] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1882.203478] env[62820]: DEBUG 
nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1882.203628] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1882.203795] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1882.204008] env[62820]: DEBUG nova.virt.hardware [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1882.204935] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3e02e9-3887-42a9-b9dc-a3b238a052b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.212290] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c97d715-1638-4d3f-b78c-f24444c45431 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.354179] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614774} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.354579] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1882.354796] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1882.355068] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7512a23e-ac44-4d24-8e7e-5d43eaf51ecd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.362968] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1882.362968] env[62820]: value = "task-1696523" [ 1882.362968] env[62820]: _type = "Task" [ 1882.362968] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.371019] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696523, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.390882] env[62820]: DEBUG nova.compute.manager [req-cb2d2127-30bf-480f-9900-1a64f6162f36 req-52f9b830-fb09-4b89-abd2-3d60b337aa45 service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Received event network-vif-plugged-fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1882.391132] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb2d2127-30bf-480f-9900-1a64f6162f36 req-52f9b830-fb09-4b89-abd2-3d60b337aa45 service nova] Acquiring lock "70ef320e-16c4-4aa8-8770-4828f71868f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.391343] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb2d2127-30bf-480f-9900-1a64f6162f36 req-52f9b830-fb09-4b89-abd2-3d60b337aa45 service nova] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.391532] env[62820]: DEBUG oslo_concurrency.lockutils [req-cb2d2127-30bf-480f-9900-1a64f6162f36 req-52f9b830-fb09-4b89-abd2-3d60b337aa45 service nova] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.391721] env[62820]: DEBUG nova.compute.manager [req-cb2d2127-30bf-480f-9900-1a64f6162f36 req-52f9b830-fb09-4b89-abd2-3d60b337aa45 service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] No waiting events found dispatching network-vif-plugged-fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1882.391893] env[62820]: WARNING nova.compute.manager [req-cb2d2127-30bf-480f-9900-1a64f6162f36 req-52f9b830-fb09-4b89-abd2-3d60b337aa45 service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Received unexpected event network-vif-plugged-fb03572c-f629-44cb-9538-f1fe53eca171 for instance with vm_state building and task_state spawning. [ 1882.438050] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e56deec9-49ff-497f-9ba7-16ac0aa89f7b tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.083s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.546277] env[62820]: DEBUG nova.network.neutron [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Successfully updated port: fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1882.690686] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.519439} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.690950] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1882.691153] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1882.691330] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1882.874090] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076514} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.874419] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1882.875060] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505bf95b-94d3-4e89-a725-96363b4019e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.897926] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1882.898252] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3313d933-bdd5-4b05-a2d1-8e5224e216d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.920879] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1882.920879] env[62820]: value = "task-1696524" [ 1882.920879] env[62820]: _type = "Task" [ 1882.920879] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.928565] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696524, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.946791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.799s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.059115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.059393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.059638] env[62820]: DEBUG nova.network.neutron [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1883.092368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "2fe561a2-57ad-4385-830e-61cd274c7123" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.092824] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.093115] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "2fe561a2-57ad-4385-830e-61cd274c7123-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.093328] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock 
"2fe561a2-57ad-4385-830e-61cd274c7123-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.093508] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.095757] env[62820]: INFO nova.compute.manager [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Terminating instance [ 1883.196900] env[62820]: INFO nova.virt.block_device [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Booting with volume ce8fe0f5-4703-4d35-897a-774c6b74f0d6 at /dev/sdb [ 1883.233366] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93cdcbbd-9166-4ee4-8fbf-3dfcd2b9bc28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.243352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80ae01b-a59e-43c3-ac48-7cf87af28941 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.273571] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b856642-0999-4d3a-b585-5a62af9b7fc1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.281676] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d6c903-b7a0-4940-bcac-83608add2e91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.315063] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be81c4e-4999-47a2-b0e5-24359b95a319 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.322076] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fb5010-b51b-4216-b205-471c67faaf53 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.335449] env[62820]: DEBUG nova.virt.block_device [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating existing volume attachment record: 530f90c2-755c-4311-9c29-e34bbe993f6e {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1883.430708] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696524, 'name': 
ReconfigVM_Task, 'duration_secs': 0.273107} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.431018] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1883.431660] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbd3eedf-e426-4f6f-b674-49dc6db95788 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.438092] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1883.438092] env[62820]: value = "task-1696525" [ 1883.438092] env[62820]: _type = "Task" [ 1883.438092] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.446056] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696525, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.599836] env[62820]: DEBUG nova.compute.manager [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1883.600170] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1883.601181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b55f747-c115-4076-a47d-67f840e3b0df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.608670] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1883.608909] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9915cff-6d5d-42d6-a341-82fe767765b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.649856] env[62820]: DEBUG nova.network.neutron [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.817074] env[62820]: DEBUG nova.network.neutron [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updating instance_info_cache with network_info: [{"id": "fb03572c-f629-44cb-9538-f1fe53eca171", "address": "fa:16:3e:78:ed:e9", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb03572c-f6", "ovs_interfaceid": "fb03572c-f629-44cb-9538-f1fe53eca171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.862825] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
1883.863071] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1883.863261] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] 2fe561a2-57ad-4385-830e-61cd274c7123 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1883.863568] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14d20658-3d10-4882-8781-c69287dcef92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.871032] env[62820]: DEBUG oslo_vmware.api [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1883.871032] env[62820]: value = "task-1696527" [ 1883.871032] env[62820]: _type = "Task" [ 1883.871032] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.880879] env[62820]: DEBUG oslo_vmware.api [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.948393] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696525, 'name': Rename_Task, 'duration_secs': 0.135234} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.948633] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1883.948913] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56318428-ecce-4bba-b324-26fec9063a22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.956025] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1883.956025] env[62820]: value = "task-1696528" [ 1883.956025] env[62820]: _type = "Task" [ 1883.956025] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.971438] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696528, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.320572] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.320971] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Instance network_info: |[{"id": "fb03572c-f629-44cb-9538-f1fe53eca171", "address": "fa:16:3e:78:ed:e9", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb03572c-f6", "ovs_interfaceid": "fb03572c-f629-44cb-9538-f1fe53eca171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1884.321445] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:ed:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30c39e9a-a798-4f25-a48c-91f786ba332c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb03572c-f629-44cb-9538-f1fe53eca171', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1884.329459] env[62820]: DEBUG oslo.service.loopingcall [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1884.329681] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1884.329912] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdc88353-b427-4d35-8ea5-50f11a5de49b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.350565] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1884.350565] env[62820]: value = "task-1696529" [ 1884.350565] env[62820]: _type = "Task" [ 1884.350565] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.359118] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696529, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.386568] env[62820]: DEBUG oslo_vmware.api [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142037} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.387878] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1884.387878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1884.387878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1884.388312] env[62820]: INFO nova.compute.manager [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Took 0.79 seconds to destroy the instance on the hypervisor. [ 1884.388965] env[62820]: DEBUG oslo.service.loopingcall [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1884.388965] env[62820]: DEBUG nova.compute.manager [-] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1884.388965] env[62820]: DEBUG nova.network.neutron [-] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1884.425446] env[62820]: DEBUG nova.compute.manager [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Received event network-changed-fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1884.425755] env[62820]: DEBUG nova.compute.manager [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Refreshing instance network info cache due to event network-changed-fb03572c-f629-44cb-9538-f1fe53eca171. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1884.426090] env[62820]: DEBUG oslo_concurrency.lockutils [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] Acquiring lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.426307] env[62820]: DEBUG oslo_concurrency.lockutils [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] Acquired lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.426548] env[62820]: DEBUG nova.network.neutron [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Refreshing network info cache for port fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1884.467302] env[62820]: DEBUG oslo_vmware.api [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696528, 'name': PowerOnVM_Task, 'duration_secs': 0.506321} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.467302] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1884.467302] env[62820]: INFO nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Took 6.30 seconds to spawn the instance on the hypervisor. 
[ 1884.467630] env[62820]: DEBUG nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1884.470122] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ae4b56-c437-429a-87fc-5dd34bd10073 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.489521] env[62820]: INFO nova.compute.manager [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Swapping old allocation on dict_keys(['8a0693d4-1456-4a04-ae15-b1eaea0edd7a']) held by migration 8e1629bd-9bfc-40aa-9009-3ac711cb7167 for instance [ 1884.513901] env[62820]: DEBUG nova.scheduler.client.report [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Overwriting current allocation {'allocations': {'8a0693d4-1456-4a04-ae15-b1eaea0edd7a': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': '7fef128f5c704730b335b62f6cce0416', 'user_id': 'bd41e844bb294c6ab6e3869af994f60a', 'consumer_generation': 1} on consumer a8803178-7fa3-42ea-824c-901063673062 {{(pid=62820) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1884.600443] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.600711] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.600931] env[62820]: DEBUG nova.network.neutron [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1884.860540] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696529, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.989160] env[62820]: INFO nova.compute.manager [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Took 11.25 seconds to build instance. 
[ 1885.141230] env[62820]: DEBUG nova.network.neutron [-] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.360672] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696529, 'name': CreateVM_Task, 'duration_secs': 0.525564} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.360869] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1885.361581] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.362060] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.362060] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1885.362314] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-669491bf-c7cb-4d7c-ae17-a33ba8233c1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.367038] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1885.367038] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52721d1e-fc52-e3bf-67d7-f1da221261d1" [ 1885.367038] env[62820]: _type = "Task" [ 1885.367038] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.376347] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52721d1e-fc52-e3bf-67d7-f1da221261d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.447549] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1885.447843] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1885.448026] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1885.448223] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1885.448393] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1885.448572] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1885.448788] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1885.448950] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1885.449129] 
env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1885.449292] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1885.449521] env[62820]: DEBUG nova.virt.hardware [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1885.450411] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6452250d-7d12-4a31-9cfd-4deac6dd4d7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.461183] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d354a00-abed-4f5f-89c9-fbfde99e031e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.468359] env[62820]: DEBUG nova.network.neutron [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updated VIF entry in instance network info cache for port fb03572c-f629-44cb-9538-f1fe53eca171. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1885.468761] env[62820]: DEBUG nova.network.neutron [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updating instance_info_cache with network_info: [{"id": "fb03572c-f629-44cb-9538-f1fe53eca171", "address": "fa:16:3e:78:ed:e9", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb03572c-f6", "ovs_interfaceid": "fb03572c-f629-44cb-9538-f1fe53eca171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.481666] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:8d:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4b5b723-be36-401c-8214-964a362697b6', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.489552] env[62820]: DEBUG oslo.service.loopingcall [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.490805] env[62820]: DEBUG oslo_concurrency.lockutils [req-e8b94983-b647-4e99-bd45-997015aaf8cc req-09a78b2a-9508-4aa9-9133-44e796f1c29a service nova] Releasing lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.491169] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1885.491676] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f6aeea68-63d1-4657-8cb8-47af60ad81f5 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "44889009-b397-463f-be67-d67126d3fa5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.760s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.492147] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dc1a7c4-8b30-406d-8e7b-8504af93734b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.508026] env[62820]: DEBUG nova.network.neutron [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [{"id": "f7027439-2429-4746-8bc9-a95ce975c96a", "address": "fa:16:3e:80:90:a0", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7027439-24", "ovs_interfaceid": "f7027439-2429-4746-8bc9-a95ce975c96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.515993] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.515993] env[62820]: value = "task-1696530" [ 1885.515993] env[62820]: _type = "Task" [ 1885.515993] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.524239] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696530, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.583377] env[62820]: INFO nova.compute.manager [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Rebuilding instance [ 1885.626134] env[62820]: DEBUG nova.compute.manager [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1885.626515] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbbaf24-b576-4e5c-9c4d-afe77301489e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.644095] env[62820]: INFO nova.compute.manager [-] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Took 1.26 seconds to deallocate network for instance. [ 1885.877955] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52721d1e-fc52-e3bf-67d7-f1da221261d1, 'name': SearchDatastore_Task, 'duration_secs': 0.034097} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.878256] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.878495] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1885.878733] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.878879] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.879072] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1885.879350] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20140ce5-f6bc-40f3-98a5-c91f33f44c63 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.887966] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1885.888191] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1885.888914] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d9ccf96-0405-4ac3-956f-1cde74087096 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.894346] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1885.894346] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]526c9be4-9919-959d-c5d1-e2fb15209c62" [ 1885.894346] env[62820]: _type = "Task" [ 1885.894346] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.902097] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526c9be4-9919-959d-c5d1-e2fb15209c62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.010633] env[62820]: DEBUG oslo_concurrency.lockutils [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-a8803178-7fa3-42ea-824c-901063673062" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.011673] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44da04f-0fb2-4135-bc2c-a1ac905f0793 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.021390] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0573fb48-ce5a-4245-a03a-8aa8ac1868f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.026653] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696530, 'name': CreateVM_Task, 'duration_secs': 0.384276} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.027117] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1886.027832] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.028033] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.028353] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1886.028603] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-033149ce-f09e-4374-ab47-8894480c2ba9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.033297] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1886.033297] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52226838-e86e-05c2-0b7e-8f09376f8196" [ 1886.033297] env[62820]: _type = "Task" [ 1886.033297] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.045712] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52226838-e86e-05c2-0b7e-8f09376f8196, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.149529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.149730] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.149960] env[62820]: DEBUG nova.objects.instance [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid 2fe561a2-57ad-4385-830e-61cd274c7123 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.406695] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]526c9be4-9919-959d-c5d1-e2fb15209c62, 'name': SearchDatastore_Task, 'duration_secs': 0.009169} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.407697] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14b6d4f7-7bed-445c-8d0e-35b2e3ba3353 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.413325] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1886.413325] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d992ae-a655-0f32-41e7-55dc2412d809" [ 1886.413325] env[62820]: _type = "Task" [ 1886.413325] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.421565] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d992ae-a655-0f32-41e7-55dc2412d809, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.450659] env[62820]: DEBUG nova.compute.manager [req-e06af5ba-57aa-4fb4-affb-05e1eeaae654 req-8f2c21b8-4290-4666-9459-af733fe7b2d4 service nova] [instance: 2fe561a2-57ad-4385-830e-61cd274c7123] Received event network-vif-deleted-41529505-0e12-447f-ab2d-6cc5935c3a5c {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1886.543847] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52226838-e86e-05c2-0b7e-8f09376f8196, 'name': SearchDatastore_Task, 'duration_secs': 0.012689} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.544231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.544536] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1886.544819] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.640797] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1886.641285] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b762e574-7a3c-4bce-90d0-4bbae837e0e0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.649067] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1886.649067] env[62820]: value = "task-1696531" [ 1886.649067] env[62820]: _type = "Task" [ 1886.649067] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.659683] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696531, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.798080] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4941f9d-fd01-4819-90f8-ffc84b88dbe3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.805544] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f447bd-580a-4783-883c-f94111cd959c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.834549] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd31715f-9bcc-47fc-8d52-d1fa7d6e6344 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.841938] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0530bcbd-8cb5-4da1-98df-296d20565fda {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.854595] env[62820]: DEBUG nova.compute.provider_tree [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1886.925611] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d992ae-a655-0f32-41e7-55dc2412d809, 'name': SearchDatastore_Task, 'duration_secs': 0.009218} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.926102] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.926198] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/70ef320e-16c4-4aa8-8770-4828f71868f5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1886.926409] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.926593] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1886.927177] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b13237c-bf9f-4bec-84e3-4ba0de6faca4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.929562] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-731580d2-5b8f-4595-b786-b4eaedde147b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.937051] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1886.937051] env[62820]: value = "task-1696532" [ 1886.937051] env[62820]: _type = "Task" [ 1886.937051] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.940441] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1886.940635] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1886.941608] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0f29b54-fddc-4982-b046-41f95059724f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.946462] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.949691] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1886.949691] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]520fc7a3-1fe1-f07a-f2f0-2a342d466aca" [ 1886.949691] env[62820]: _type = "Task" [ 1886.949691] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.957133] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520fc7a3-1fe1-f07a-f2f0-2a342d466aca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.106162] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1887.106613] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3856332-0aee-42e1-bdd9-a191035e959a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.115608] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1887.115608] env[62820]: value = "task-1696533" [ 1887.115608] env[62820]: _type = "Task" [ 1887.115608] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.126162] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696533, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.161448] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696531, 'name': PowerOffVM_Task, 'duration_secs': 0.121063} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.161869] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1887.162208] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1887.163143] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b398720-3ddf-48f4-b4cb-bca6a5fbcf99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.171905] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1887.172209] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-314663f7-a46e-4ad4-85dd-af2f14f8df24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.203257] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1887.203257] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1887.203257] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Deleting the datastore file [datastore1] 44889009-b397-463f-be67-d67126d3fa5a {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1887.203257] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bfb87ee-4d2a-41a8-8bbd-ad2006808307 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.209789] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1887.209789] env[62820]: value = "task-1696535" [ 1887.209789] env[62820]: _type = "Task" [ 1887.209789] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.219101] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.358087] env[62820]: DEBUG nova.scheduler.client.report [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1887.447381] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696532, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452887} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.447647] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/70ef320e-16c4-4aa8-8770-4828f71868f5.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1887.447866] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1887.448138] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91c967ac-25a0-4767-ba10-45709e296253 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.458944] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]520fc7a3-1fe1-f07a-f2f0-2a342d466aca, 'name': SearchDatastore_Task, 'duration_secs': 0.01709} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.460702] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1887.460702] env[62820]: value = "task-1696536" [ 1887.460702] env[62820]: _type = "Task" [ 1887.460702] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.460919] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-770ed562-efa1-4d7c-b8b3-6037b2cce8d9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.475127] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.476111] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1887.476111] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522f82ed-c616-3dd5-55be-00d35b2907bb" [ 1887.476111] env[62820]: _type = "Task" [ 1887.476111] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.483598] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522f82ed-c616-3dd5-55be-00d35b2907bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.626039] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696533, 'name': PowerOffVM_Task, 'duration_secs': 0.306991} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.626319] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1887.626972] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1887.627207] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1887.627380] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1887.627581] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1887.627732] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1887.627878] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1887.628093] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1887.628259] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 
tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1887.628427] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1887.628592] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1887.628766] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1887.633670] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45c157bf-235e-4072-94af-dcc85332cb55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.649356] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1887.649356] env[62820]: value = "task-1696537" [ 1887.649356] env[62820]: _type = "Task" [ 1887.649356] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.656882] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696537, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.719188] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227229} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.719430] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1887.719668] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1887.719855] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1887.863062] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.713s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.889253] env[62820]: INFO nova.scheduler.client.report [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance 2fe561a2-57ad-4385-830e-61cd274c7123 [ 1887.972193] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065983} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.972455] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1887.973231] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5103e09a-449d-495a-906d-afa62a321d64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.997109] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/70ef320e-16c4-4aa8-8770-4828f71868f5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1887.997672] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd723d04-c6ba-4a7a-867e-7d67ec461d2b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.014692] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]522f82ed-c616-3dd5-55be-00d35b2907bb, 'name': SearchDatastore_Task, 'duration_secs': 0.010706} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.015284] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.015543] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1888.015832] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bbcb727-53c4-45e0-a72d-c01cc6d4042c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.021753] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1888.021753] env[62820]: value = "task-1696538" [ 1888.021753] env[62820]: _type = "Task" [ 1888.021753] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.022998] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1888.022998] env[62820]: value = "task-1696539" [ 1888.022998] env[62820]: _type = "Task" [ 1888.022998] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.033727] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.036716] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696538, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.158682] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696537, 'name': ReconfigVM_Task, 'duration_secs': 0.144496} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.159411] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55e9b9c-3420-4b7f-9e38-545a15785bd7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.180198] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1888.180476] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1888.180651] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1888.180837] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1888.181040] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1888.181142] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1888.181347] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1888.181505] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1888.181670] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1888.181837] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1888.182022] env[62820]: DEBUG nova.virt.hardware [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1888.182822] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24f67a15-5632-439b-8eda-b8a2cdbf8467 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.188544] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1888.188544] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52dbd388-cdf0-9743-61c6-8d522fef23db" [ 1888.188544] env[62820]: _type = "Task" [ 1888.188544] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.196623] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52dbd388-cdf0-9743-61c6-8d522fef23db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.400027] env[62820]: DEBUG oslo_concurrency.lockutils [None req-98c8083a-9a06-4680-95e5-27dad7b80a4a tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "2fe561a2-57ad-4385-830e-61cd274c7123" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.306s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.537525] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.540534] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696539, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504117} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.540788] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1888.541028] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1888.541282] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9df5ebc2-fad0-4f63-8e21-b63a8a41b00b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.547177] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1888.547177] env[62820]: value = "task-1696540" [ 1888.547177] env[62820]: _type = "Task" [ 1888.547177] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.555325] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696540, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.702479] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52dbd388-cdf0-9743-61c6-8d522fef23db, 'name': SearchDatastore_Task, 'duration_secs': 0.040485} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.707692] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1888.707999] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bcec625-bec3-43c6-9cc7-062dfa23b44b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.725422] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1888.725422] env[62820]: value = "task-1696541" [ 1888.725422] env[62820]: _type = "Task" [ 1888.725422] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.735622] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.755672] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1888.755915] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1888.756078] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1888.756300] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1888.756412] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1888.756560] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1888.756771] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1888.756930] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1888.757195] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1888.757393] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1888.757568] env[62820]: DEBUG nova.virt.hardware [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1888.758404] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525b9eb4-eceb-4aa6-a8a5-348a1ce8305a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.765916] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998492b2-680c-431e-b960-bad3352aa4a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.778816] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Instance VIF info [] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1888.784198] env[62820]: DEBUG oslo.service.loopingcall [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1888.784435] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1888.784635] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aba4a4eb-15dd-4f0a-8d7f-03daee300cd4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.800775] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1888.800775] env[62820]: value = "task-1696542" [ 1888.800775] env[62820]: _type = "Task" [ 1888.800775] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.807994] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696542, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.886496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.886946] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.887330] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.887724] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.887951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.890755] env[62820]: INFO nova.compute.manager [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 
tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Terminating instance [ 1889.034054] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696538, 'name': ReconfigVM_Task, 'duration_secs': 0.577013} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.034351] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/70ef320e-16c4-4aa8-8770-4828f71868f5.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.034997] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cec8220-0102-4bfb-9beb-adea20de92ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.041229] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1889.041229] env[62820]: value = "task-1696543" [ 1889.041229] env[62820]: _type = "Task" [ 1889.041229] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.048738] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696543, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.055562] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696540, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084432} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.055806] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1889.056536] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe456753-b890-430d-9943-d54d4c25cd53 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.077938] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1889.078257] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31940091-0481-435a-927a-e249fbc681a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.097823] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1889.097823] env[62820]: value = "task-1696544" [ 1889.097823] env[62820]: _type = "Task" [ 1889.097823] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.110989] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.234560] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696541, 'name': ReconfigVM_Task, 'duration_secs': 0.232011} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.234881] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1889.235635] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b03125e-4a8b-4673-aaa1-2ae9441920ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.260875] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1889.261199] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-736ca791-4f4f-4c5f-bf65-3ef00838b1fa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.280837] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1889.280837] env[62820]: value = "task-1696545" [ 1889.280837] env[62820]: _type = "Task" [ 1889.280837] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.288630] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696545, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.309864] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696542, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.395335] env[62820]: DEBUG nova.compute.manager [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1889.395644] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1889.396670] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf497a1-ea8f-4ed6-816c-e6d77d17c5ea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.408647] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1889.408884] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-269fa04e-ddaa-4e28-b53d-37d39e3d1e87 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.414888] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1889.414888] env[62820]: value = "task-1696546" [ 1889.414888] env[62820]: _type = "Task" [ 1889.414888] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.422641] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696546, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.550928] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696543, 'name': Rename_Task, 'duration_secs': 0.156561} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.551215] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.551483] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afead2f4-7ad5-42b4-9aae-a6b9d6fb5954 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.557836] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1889.557836] env[62820]: value = "task-1696547" [ 1889.557836] env[62820]: _type = "Task" [ 1889.557836] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.566084] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696547, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.607292] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696544, 'name': ReconfigVM_Task, 'duration_secs': 0.315245} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.607682] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8/25c8d7b7-d639-474a-b5cc-c01a6a0a79f8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.608970] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'device_type': 'disk', 'guest_format': None, 'disk_bus': None, 'encrypted': False, 'size': 0, 'device_name': '/dev/sda', 'encryption_options': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'image_id': 'b17619ac-779a-4463-ab94-4bb0b9ba63c1'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'}, 'attachment_id': '530f90c2-755c-4311-9c29-e34bbe993f6e', 'delete_on_termination': False, 'boot_index': None, 'device_type': None, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=62820) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1889.609206] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1889.609402] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1889.610236] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7a9f05-7af3-4f2c-9a4e-538f3e7b25ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.625855] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c80aa7-dbc9-4bf9-b291-6f4de6194fb9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.649311] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6/volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1889.649634] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-584ddbc3-9b5d-49ae-a46b-37c682246a57 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.667253] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1889.667253] env[62820]: value = "task-1696548" [ 1889.667253] env[62820]: _type = "Task" [ 1889.667253] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.677360] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696548, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.791084] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696545, 'name': ReconfigVM_Task, 'duration_secs': 0.311376} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.791349] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to attach disk [datastore1] a8803178-7fa3-42ea-824c-901063673062/a8803178-7fa3-42ea-824c-901063673062.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.792219] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d24ba52-b2d4-4ab3-b702-4fc6e8fec3c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.815581] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2469cf-8dfb-4317-bcb0-2ecb8e6540fc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.823016] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696542, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.839267] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea17aa23-7d0c-4cc9-bccc-14cbc8dc064b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.859823] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f088a0ce-ae39-4566-a84e-c9ad2377eadb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.866641] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.866869] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8052a3b-dd08-4773-9914-fe40dc13da56 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.872779] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1889.872779] env[62820]: value = "task-1696549" [ 1889.872779] env[62820]: _type = "Task" [ 1889.872779] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.882460] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696549, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.924895] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696546, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.069639] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696547, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.177436] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696548, 'name': ReconfigVM_Task, 'duration_secs': 0.442526} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.177825] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6/volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1890.183110] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83f8c498-ff47-4fe9-8b4e-a4a4ef085996 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.198572] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1890.198572] env[62820]: value = "task-1696550" [ 1890.198572] env[62820]: _type = "Task" [ 1890.198572] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.207159] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696550, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.321544] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696542, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.383171] env[62820]: DEBUG oslo_vmware.api [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696549, 'name': PowerOnVM_Task, 'duration_secs': 0.428176} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.383462] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.428196] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696546, 'name': PowerOffVM_Task, 'duration_secs': 0.995053} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.428472] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1890.428641] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1890.428895] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-071b42b1-b701-4e61-b451-05a3f598f47f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.568970] env[62820]: DEBUG oslo_vmware.api [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696547, 'name': PowerOnVM_Task, 'duration_secs': 0.540909} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.569200] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.569402] env[62820]: INFO nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Took 8.40 seconds to spawn the instance on the hypervisor. 
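The CreateVM_Task, ReconfigVM_Task and PowerOnVM_Task records above all follow the same wait_for_task/_poll_task pattern: invoke a vCenter task, then poll it until it reports success, logging "progress is N%" on each pass. The sketch below is illustrative only and is not oslo.vmware's actual implementation; the get_task_info callable and its returned dict are assumptions introduced for the example.

```python
# Illustrative sketch (assumed helper, not oslo.vmware's real code): a generic
# poll-until-done loop mirroring the wait_for_task/_poll_task records above.
import time


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it completes or errors.

    get_task_info is a hypothetical callable returning a dict such as
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info  # e.g. "CreateVM_Task ... completed successfully"
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Corresponds to the repeated "progress is N%" DEBUG lines while running.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```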
[ 1890.569615] env[62820]: DEBUG nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1890.570360] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2c3b95-0e3b-415b-9ebc-e1553259b706 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.708475] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696550, 'name': ReconfigVM_Task, 'duration_secs': 0.208853} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.708784] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1890.709362] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-708f78d4-9940-43a2-b54e-a1ebbcd4fc65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.715596] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1890.715596] env[62820]: value = "task-1696552" [ 1890.715596] env[62820]: _type = "Task" [ 1890.715596] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.723075] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696552, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.821663] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696542, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.896238] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1890.896459] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1890.896680] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleting the datastore file [datastore1] 6da857ea-f213-4b17-9e9f-d74d1ea649c7 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.896948] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd7bf56a-feb6-4fc7-adde-86349c0054b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.903503] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for the task: (returnval){ [ 1890.903503] env[62820]: value = "task-1696553" [ 1890.903503] env[62820]: _type = "Task" [ 1890.903503] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.911594] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.088959] env[62820]: INFO nova.compute.manager [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Took 13.25 seconds to build instance. [ 1891.226127] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696552, 'name': Rename_Task, 'duration_secs': 0.292545} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.226447] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1891.226696] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45954404-acbf-4932-a20c-307779fa5684 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.233921] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1891.233921] env[62820]: value = "task-1696554" [ 1891.233921] env[62820]: _type = "Task" [ 1891.233921] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.241529] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696554, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.323537] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696542, 'name': CreateVM_Task, 'duration_secs': 2.091383} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.323926] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1891.324308] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.324395] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.324682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1891.324937] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ab76840-b1dd-46b7-8e08-a3449044f819 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.329733] env[62820]: DEBUG 
oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1891.329733] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528790ef-de91-0af7-5ffb-e7e249357f36" [ 1891.329733] env[62820]: _type = "Task" [ 1891.329733] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.337212] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528790ef-de91-0af7-5ffb-e7e249357f36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.415104] env[62820]: DEBUG oslo_vmware.api [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Task: {'id': task-1696553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219598} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.415376] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.415564] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.416514] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.416514] env[62820]: INFO nova.compute.manager [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Took 2.02 seconds to destroy the instance on the hypervisor. [ 1891.416514] env[62820]: DEBUG oslo.service.loopingcall [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.416514] env[62820]: DEBUG nova.compute.manager [-] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1891.416514] env[62820]: DEBUG nova.network.neutron [-] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.420695] env[62820]: INFO nova.compute.manager [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Rescuing [ 1891.420944] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.421101] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.421261] env[62820]: DEBUG nova.network.neutron [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1891.427713] env[62820]: INFO nova.compute.manager [None req-48677c62-4510-4ccd-aef5-64970129fcef tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance to original state: 'active' [ 1891.591776] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e243cc96-ed2a-4ad8-b564-e9c6442dbf4f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.762s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.713075] env[62820]: DEBUG nova.compute.manager [req-02f85375-de9e-4524-a4a1-6f878a9d4afc req-0d2f0a01-f290-4fd8-afd4-68ca23b69de5 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Received event network-vif-deleted-ab39f297-7fa5-430b-ba72-0857fd452878 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1891.713186] env[62820]: INFO nova.compute.manager [req-02f85375-de9e-4524-a4a1-6f878a9d4afc req-0d2f0a01-f290-4fd8-afd4-68ca23b69de5 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Neutron deleted interface ab39f297-7fa5-430b-ba72-0857fd452878; detaching it from the instance and deleting it from the info cache [ 1891.713373] env[62820]: DEBUG nova.network.neutron [req-02f85375-de9e-4524-a4a1-6f878a9d4afc req-0d2f0a01-f290-4fd8-afd4-68ca23b69de5 service nova] [instance: 
6da857ea-f213-4b17-9e9f-d74d1ea649c7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.744260] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696554, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.840999] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528790ef-de91-0af7-5ffb-e7e249357f36, 'name': SearchDatastore_Task, 'duration_secs': 0.009929} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.841309] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.841547] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1891.841814] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.841928] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.842122] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.842382] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e73123f8-aab0-46ed-9d8f-7ba90ba95055 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.850426] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.850601] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1891.851283] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f68a4b6-0879-4632-af22-98ce25d76f12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.856331] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1891.856331] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5276ae75-cea9-0a27-4f1b-24a052bc3edc" [ 1891.856331] env[62820]: _type = "Task" [ 1891.856331] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.863575] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5276ae75-cea9-0a27-4f1b-24a052bc3edc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.126264] env[62820]: DEBUG nova.network.neutron [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updating instance_info_cache with network_info: [{"id": "fb03572c-f629-44cb-9538-f1fe53eca171", "address": "fa:16:3e:78:ed:e9", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb03572c-f6", "ovs_interfaceid": "fb03572c-f629-44cb-9538-f1fe53eca171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.159835] env[62820]: DEBUG nova.network.neutron [-] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.216230] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-3f4abaea-de67-4732-93c4-12ad88751e1e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.225938] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbf4628-f1de-4cc5-b20a-3707fe63120d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.245112] env[62820]: DEBUG oslo_vmware.api [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696554, 'name': PowerOnVM_Task, 'duration_secs': 0.601861} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.245408] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1892.245617] env[62820]: DEBUG nova.compute.manager [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1892.246384] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53301651-60e3-4498-a29e-f38bda797321 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.264943] env[62820]: DEBUG nova.compute.manager [req-02f85375-de9e-4524-a4a1-6f878a9d4afc req-0d2f0a01-f290-4fd8-afd4-68ca23b69de5 service nova] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Detach interface failed, port_id=ab39f297-7fa5-430b-ba72-0857fd452878, reason: Instance 6da857ea-f213-4b17-9e9f-d74d1ea649c7 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1892.367499] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]5276ae75-cea9-0a27-4f1b-24a052bc3edc, 'name': SearchDatastore_Task, 'duration_secs': 0.013122} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.368296] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf742e8e-c6ec-4e6e-a761-c8c918321d5a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.373849] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1892.373849] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52cdc792-7364-eb12-81fa-043302243090" [ 1892.373849] env[62820]: _type = "Task" [ 1892.373849] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.381603] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cdc792-7364-eb12-81fa-043302243090, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.628697] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.662761] env[62820]: INFO nova.compute.manager [-] [instance: 6da857ea-f213-4b17-9e9f-d74d1ea649c7] Took 1.25 seconds to deallocate network for instance. [ 1892.673029] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.673157] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.772927] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.773206] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.773415] env[62820]: DEBUG nova.objects.instance [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1892.885608] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52cdc792-7364-eb12-81fa-043302243090, 'name': SearchDatastore_Task, 
'duration_secs': 0.031286} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.885861] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.886151] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1892.886498] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfbbc9c3-7755-40ad-bc83-8738085e4de7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.893294] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1892.893294] env[62820]: value = "task-1696555" [ 1892.893294] env[62820]: _type = "Task" [ 1892.893294] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.901464] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696555, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.996953] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.997336] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.997616] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "a8803178-7fa3-42ea-824c-901063673062-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.997985] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.998089] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.000423] env[62820]: INFO nova.compute.manager [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Terminating instance [ 1893.168058] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.176072] env[62820]: DEBUG nova.compute.utils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.403675] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 
tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696555, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.504590] env[62820]: DEBUG nova.compute.manager [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1893.504844] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1893.505154] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b30ab6ba-08b6-4f7b-9989-f39e70125236 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.512983] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1893.512983] env[62820]: value = "task-1696556" [ 1893.512983] env[62820]: _type = "Task" [ 1893.512983] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.522839] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696556, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.636206] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.636541] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.665046] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1893.665046] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6fd32e0-62a7-4204-8610-f23185f67bbd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.673811] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1893.673811] env[62820]: value = "task-1696557" [ 1893.673811] env[62820]: _type = "Task" [ 1893.673811] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.678184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.684672] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696557, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.782661] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bc774711-4587-4f12-84b8-856b9d16aa6a tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.784201] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.616s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.784471] env[62820]: DEBUG nova.objects.instance [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lazy-loading 'resources' on Instance uuid 6da857ea-f213-4b17-9e9f-d74d1ea649c7 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1893.906186] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.947819} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.906510] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1893.906831] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1893.907125] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7135687-5099-49fd-9813-7fa24bf7f583 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.914479] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1893.914479] env[62820]: value = "task-1696558" [ 1893.914479] env[62820]: _type = "Task" [ 1893.914479] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.925030] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696558, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.023233] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696556, 'name': PowerOffVM_Task, 'duration_secs': 0.409708} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.023499] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1894.023701] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1894.023893] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353674', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'name': 'volume-8d042475-114b-486b-830d-875d25458b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'a8803178-7fa3-42ea-824c-901063673062', 'attached_at': '2024-12-10T16:57:42.000000', 'detached_at': '', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'serial': '8d042475-114b-486b-830d-875d25458b64'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1894.024652] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b342a7de-0816-40d5-9a9a-6ad1d1dcb9cd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.045974] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b790908-9cbe-4850-9269-6ed1eb7b02a0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.052675] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31229b1c-8406-4691-afa1-7500bd1a63b4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.074771] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93705f9d-ed83-4b64-900a-1d428170ad3a {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.089647] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] The volume has not been displaced from its original location: [datastore1] volume-8d042475-114b-486b-830d-875d25458b64/volume-8d042475-114b-486b-830d-875d25458b64.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1894.094853] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1894.095157] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0e5d51d-3895-4e11-b958-2bc836cf4989 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.112436] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1894.112436] env[62820]: value = "task-1696559" [ 1894.112436] env[62820]: _type = "Task" [ 1894.112436] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.123165] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696559, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.140198] env[62820]: INFO nova.compute.manager [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Detaching volume ce8fe0f5-4703-4d35-897a-774c6b74f0d6 [ 1894.171694] env[62820]: INFO nova.virt.block_device [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Attempting to driver detach volume ce8fe0f5-4703-4d35-897a-774c6b74f0d6 from mountpoint /dev/sdb [ 1894.171960] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1894.172176] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1894.173038] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a36b7a1-b43d-41b9-b2d8-682e6afcf4dd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.201761] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696557, 'name': PowerOffVM_Task, 'duration_secs': 0.297052} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.202542] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20acdd8-b8b5-48cf-a094-e4298d23ffd7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.205211] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1894.205974] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816b21b7-279b-47ba-9760-e303972b8620 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.225906] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c553e667-a58d-463e-a68b-f2aad2bac488 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.229014] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa68b1d-59c1-4113-a55e-fed647505a73 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.258619] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.258955] env[62820]: DEBUG 
oslo_concurrency.lockutils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.259131] env[62820]: INFO nova.compute.manager [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Attaching volume 1c9f0326-748e-4bd5-9616-8444644f2e72 to /dev/sdb [ 1894.260986] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57502784-0896-47db-b0bf-3e6007af6bc9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.281838] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] The volume has not been displaced from its original location: [datastore1] volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6/volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1894.286812] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1894.291690] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6ef7461-ec5d-4a5e-b557-33db2c1665b5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.314283] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1894.314587] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbf1081a-cb64-4b8c-956f-8b22249964d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.319006] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f33f7ca-b8ad-47ca-a0e3-2253dc858e82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.321682] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1894.321682] env[62820]: value = "task-1696560" [ 1894.321682] env[62820]: _type = "Task" [ 1894.321682] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.328511] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1894.328511] env[62820]: value = "task-1696561" [ 1894.328511] env[62820]: _type = "Task" [ 1894.328511] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.329438] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e30c3b-c8b4-49c3-b69c-cae79972265e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.340018] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696560, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.346720] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1894.346941] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1894.347211] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.347405] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.347547] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1894.347800] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec388907-e609-4853-9074-3832215acec0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.353128] env[62820]: DEBUG nova.virt.block_device [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 
tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating existing volume attachment record: adbce413-3641-4bc9-8bd0-787b6ab717ac {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1894.365150] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1894.365362] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1894.366124] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be975b33-8c68-4392-8c2f-cdd8d14c35e4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.373225] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1894.373225] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523816b8-b43f-bc96-6720-f6e9640f417f" [ 1894.373225] env[62820]: _type = "Task" [ 1894.373225] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.381399] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523816b8-b43f-bc96-6720-f6e9640f417f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.426505] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196688} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.429402] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1894.430706] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b434389-2039-4985-b89b-fb183c4ea19a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.450317] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1894.453092] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-394b9818-e073-456b-a361-d35cfa0ff4ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.472661] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1894.472661] env[62820]: value = "task-1696562" [ 1894.472661] env[62820]: _type = "Task" [ 1894.472661] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.480440] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.520877] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b5836f-57e4-4c01-8727-6bafc16e053d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.528451] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289dfc58-c01a-422b-9436-2ad78a15db59 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.565146] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609a560f-6e12-4070-8195-0134a29fde02 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.572915] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea377d20-3104-4319-8f88-28ae37f03b17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.586772] env[62820]: DEBUG nova.compute.provider_tree [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.622400] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696559, 'name': ReconfigVM_Task, 'duration_secs': 0.310123} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.622701] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1894.627567] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58d75e15-1636-475e-872b-80bc07c28a82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.643993] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1894.643993] env[62820]: value = "task-1696566" [ 1894.643993] env[62820]: _type = "Task" [ 1894.643993] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.657943] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696566, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.832486] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696560, 'name': ReconfigVM_Task, 'duration_secs': 0.241641} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.832775] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1894.837308] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9803635a-dfe9-4337-bb49-142dc921ba21 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.853292] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1894.853292] env[62820]: value = "task-1696567" [ 1894.853292] env[62820]: _type = "Task" [ 1894.853292] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.861561] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696567, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.883384] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523816b8-b43f-bc96-6720-f6e9640f417f, 'name': SearchDatastore_Task, 'duration_secs': 0.019976} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.884198] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5679e431-63b9-40aa-92fe-9cdb9da249a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.889913] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1894.889913] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52c2f90b-77a8-d2a6-cda9-1368b04a78ea" [ 1894.889913] env[62820]: _type = "Task" [ 1894.889913] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.897510] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c2f90b-77a8-d2a6-cda9-1368b04a78ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.983447] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696562, 'name': ReconfigVM_Task, 'duration_secs': 0.295171} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.983737] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 44889009-b397-463f-be67-d67126d3fa5a/44889009-b397-463f-be67-d67126d3fa5a.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1894.984386] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d00c6f87-0ba3-48a5-a73c-88a633f7bcdd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.990939] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1894.990939] env[62820]: value = "task-1696568" [ 1894.990939] env[62820]: _type = "Task" [ 1894.990939] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.001473] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696568, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.089922] env[62820]: DEBUG nova.scheduler.client.report [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1895.153849] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696566, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.364511] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696567, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.399616] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52c2f90b-77a8-d2a6-cda9-1368b04a78ea, 'name': SearchDatastore_Task, 'duration_secs': 0.013576} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.399898] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.400212] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. {{(pid=62820) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1895.400483] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-352e0e76-c911-4cdd-998d-171b8e0623c2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.407668] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1895.407668] env[62820]: value = "task-1696569" [ 1895.407668] env[62820]: _type = "Task" [ 1895.407668] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.414994] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.501068] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696568, 'name': Rename_Task, 'duration_secs': 0.136823} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.501364] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1895.501585] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f3fd156-dc1f-44b7-a81b-11b0c3e4cb1b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.507577] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1895.507577] env[62820]: value = "task-1696570" [ 1895.507577] env[62820]: _type = "Task" [ 1895.507577] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.515332] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696570, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.595148] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.621870] env[62820]: INFO nova.scheduler.client.report [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Deleted allocations for instance 6da857ea-f213-4b17-9e9f-d74d1ea649c7 [ 1895.655789] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696566, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.864863] env[62820]: DEBUG oslo_vmware.api [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696567, 'name': ReconfigVM_Task, 'duration_secs': 0.830675} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.865194] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353688', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'name': 'volume-ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c8d7b7-d639-474a-b5cc-c01a6a0a79f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6', 'serial': 'ce8fe0f5-4703-4d35-897a-774c6b74f0d6'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1895.923226] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.019487] env[62820]: DEBUG oslo_vmware.api [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696570, 'name': PowerOnVM_Task, 'duration_secs': 0.432221} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.019924] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1896.020348] env[62820]: DEBUG nova.compute.manager [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1896.021512] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce3f23f-1932-475c-a518-1e7e74dc9d80 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.129938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2944c950-5268-417a-addc-d0303d0bc51f tempest-ServersTestJSON-1086903404 tempest-ServersTestJSON-1086903404-project-member] Lock "6da857ea-f213-4b17-9e9f-d74d1ea649c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.243s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.155562] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696566, 'name': ReconfigVM_Task, 'duration_secs': 1.043951} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.155866] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353674', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'name': 'volume-8d042475-114b-486b-830d-875d25458b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'a8803178-7fa3-42ea-824c-901063673062', 'attached_at': '2024-12-10T16:57:42.000000', 'detached_at': '', 'volume_id': '8d042475-114b-486b-830d-875d25458b64', 'serial': '8d042475-114b-486b-830d-875d25458b64'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1896.156374] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1896.157140] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fda1a2c-6c0d-4822-8d7c-665d3ae537c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.164420] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1896.164648] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfe566e6-2415-44fb-ae53-40083a001f70 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.310404] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1896.310679] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1896.310881] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleting the datastore file [datastore1] a8803178-7fa3-42ea-824c-901063673062 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1896.311173] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f78c4dc6-7e3b-4932-af6c-815a9d15591c {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.317422] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1896.317422] env[62820]: value = "task-1696572" [ 1896.317422] env[62820]: _type = "Task" [ 1896.317422] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.327243] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.420381] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696569, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.422050] env[62820]: DEBUG nova.objects.instance [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'flavor' on Instance uuid 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1896.544487] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.544851] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.545012] env[62820]: DEBUG nova.objects.instance [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62820) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1896.828223] env[62820]: DEBUG oslo_vmware.api [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.430935} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.828664] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1896.828806] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1896.829419] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1896.829419] env[62820]: INFO nova.compute.manager [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Took 3.32 seconds to destroy the instance on the hypervisor. [ 1896.829514] env[62820]: DEBUG oslo.service.loopingcall [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1896.830560] env[62820]: DEBUG nova.compute.manager [-] [instance: a8803178-7fa3-42ea-824c-901063673062] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1896.830560] env[62820]: DEBUG nova.network.neutron [-] [instance: a8803178-7fa3-42ea-824c-901063673062] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1896.921749] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696569, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.24384} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.922270] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk. 
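The wait_for_task / _poll_task records above (for example task-1696569, CopyVirtualDisk_Task, reported at 4% and then completed after 1.24s) all follow the same submit-then-poll pattern. The snippet below is only a minimal, self-contained sketch of that pattern for readers tracing these log lines; the FakeTask class, its poll() method, and the interval are illustrative stand-ins and not the oslo.vmware implementation.

import time

class FakeTask:
    """Illustrative stand-in for a vCenter task handle (not oslo.vmware)."""
    def __init__(self, task_id, name, steps=3):
        self.task_id = task_id
        self.name = name
        self._steps = steps
        self._polls = 0

    def poll(self):
        # Each poll advances progress; a real driver would instead query the
        # task's current state/progress from the server.
        self._polls += 1
        progress = min(100, int(100 * self._polls / self._steps))
        state = "success" if progress >= 100 else "running"
        return state, progress

def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, mirroring the progress lines in the log."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.task_id} completed successfully in {duration:.3f}s")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.task_id} failed")
        time.sleep(interval)

wait_for_task(FakeTask("task-1696569", "CopyVirtualDisk_Task"))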
[ 1896.922974] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c405ef5-5110-4fa4-8c8f-bf1a8aa074d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.950746] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1896.952760] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1de0240-4e40-4fbc-9542-297201e5468c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.972038] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1896.972038] env[62820]: value = "task-1696574" [ 1896.972038] env[62820]: _type = "Task" [ 1896.972038] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.981622] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696574, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.431580] env[62820]: DEBUG nova.compute.manager [req-4b3a57af-6b3e-4d80-9eb6-4ad67a16b1db req-fdf1ec4f-bfa1-4315-a8e0-509398a2866f service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Received event network-vif-deleted-f7027439-2429-4746-8bc9-a95ce975c96a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1897.431798] env[62820]: INFO nova.compute.manager [req-4b3a57af-6b3e-4d80-9eb6-4ad67a16b1db req-fdf1ec4f-bfa1-4315-a8e0-509398a2866f service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Neutron deleted interface f7027439-2429-4746-8bc9-a95ce975c96a; detaching it from the instance and deleting it from the info cache [ 1897.431976] env[62820]: DEBUG nova.network.neutron [req-4b3a57af-6b3e-4d80-9eb6-4ad67a16b1db req-fdf1ec4f-bfa1-4315-a8e0-509398a2866f service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.468066] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5dad808d-2fe4-4b7e-a4ef-1040b034ef01 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.831s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.484397] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696574, 'name': ReconfigVM_Task, 'duration_secs': 0.325166} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.485035] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 70ef320e-16c4-4aa8-8770-4828f71868f5/b17619ac-779a-4463-ab94-4bb0b9ba63c1-rescue.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1897.489254] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec27d18-dbf7-4930-817e-0e62881e6ecb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.516367] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd8a02d2-324e-4cee-ae9a-0db83734cf4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.533257] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1897.533257] env[62820]: value = "task-1696575" [ 1897.533257] env[62820]: _type = "Task" [ 1897.533257] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.541719] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696575, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.558465] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ad985d95-42bb-4bfb-926b-b8f7a0f98b62 tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.660636] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.663632] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.663632] env[62820]: INFO nova.compute.manager [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Shelving [ 1897.705846] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "44889009-b397-463f-be67-d67126d3fa5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.705846] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "44889009-b397-463f-be67-d67126d3fa5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.705846] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "44889009-b397-463f-be67-d67126d3fa5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.706092] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 
tempest-ServerShowV257Test-890976395-project-member] Lock "44889009-b397-463f-be67-d67126d3fa5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.706268] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "44889009-b397-463f-be67-d67126d3fa5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.709871] env[62820]: INFO nova.compute.manager [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Terminating instance [ 1897.903060] env[62820]: DEBUG nova.network.neutron [-] [instance: a8803178-7fa3-42ea-824c-901063673062] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.934685] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63ad348d-1c64-4f0a-87e6-ddb24b9523b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.944443] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc391220-ef38-4b81-97b1-285fe027836c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.978297] env[62820]: DEBUG nova.compute.manager [req-4b3a57af-6b3e-4d80-9eb6-4ad67a16b1db req-fdf1ec4f-bfa1-4315-a8e0-509398a2866f service nova] [instance: a8803178-7fa3-42ea-824c-901063673062] Detach interface failed, port_id=f7027439-2429-4746-8bc9-a95ce975c96a, reason: Instance a8803178-7fa3-42ea-824c-901063673062 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1898.043809] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696575, 'name': ReconfigVM_Task, 'duration_secs': 0.154657} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.043809] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1898.044102] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a805c8a-8ae1-4711-b48c-e8e7f0f97a12 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.050661] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1898.050661] env[62820]: value = "task-1696576" [ 1898.050661] env[62820]: _type = "Task" [ 1898.050661] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.058839] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.068801] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.069079] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.069451] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.069451] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.069635] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.075021] env[62820]: INFO nova.compute.manager [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Terminating instance [ 1898.217168] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "refresh_cache-44889009-b397-463f-be67-d67126d3fa5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.217473] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquired lock "refresh_cache-44889009-b397-463f-be67-d67126d3fa5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.217784] env[62820]: DEBUG nova.network.neutron [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1898.406121] env[62820]: INFO nova.compute.manager [-] [instance: a8803178-7fa3-42ea-824c-901063673062] Took 1.58 seconds to deallocate network for instance. [ 1898.560500] env[62820]: DEBUG oslo_vmware.api [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696576, 'name': PowerOnVM_Task, 'duration_secs': 0.385193} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.560846] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1898.563588] env[62820]: DEBUG nova.compute.manager [None req-5de72eaa-7000-4d0a-8125-ba6278bcc95d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1898.564372] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60b663c-fa85-43b9-a57b-8e7da1b5c1c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.580392] env[62820]: DEBUG nova.compute.manager [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1898.580621] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1898.581400] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adca90c-8abc-402b-8733-e2ffd3db8dec {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.588756] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1898.588988] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1107225-8133-4838-9916-16c5207df3d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.595347] env[62820]: DEBUG oslo_vmware.api [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1898.595347] env[62820]: value = "task-1696577" [ 1898.595347] env[62820]: _type = "Task" [ 1898.595347] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.603512] env[62820]: DEBUG oslo_vmware.api [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.677166] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1898.677480] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9dc46c4-4b66-431e-8a00-08bc989f1cb8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.685011] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1898.685011] env[62820]: value = "task-1696578" [ 1898.685011] env[62820]: _type = "Task" [ 1898.685011] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.693892] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696578, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.740634] env[62820]: DEBUG nova.network.neutron [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1898.833813] env[62820]: DEBUG nova.network.neutron [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.911800] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Volume attach. Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1898.912066] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353697', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'name': 'volume-1c9f0326-748e-4bd5-9616-8444644f2e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '901626d2-1788-4017-b0c7-52537618804c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'serial': '1c9f0326-748e-4bd5-9616-8444644f2e72'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1898.913619] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8cfbaf-0828-4ae9-98a5-0978915a59bf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.933083] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e66ff7-0d56-449f-bf26-aa995a07aa09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.967358] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] volume-1c9f0326-748e-4bd5-9616-8444644f2e72/volume-1c9f0326-748e-4bd5-9616-8444644f2e72.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1898.968552] env[62820]: INFO nova.compute.manager [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: a8803178-7fa3-42ea-824c-901063673062] Took 0.56 seconds to detach 1 volumes for instance. 
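The _attach_volume_vmdk record above logs the connection_info dict the driver receives from Cinder before it reconfigures the VM to attach the disk. The sketch below only illustrates pulling the relevant fields out of a dict with that shape; the helper name and dataclass are made up for the example and are not Nova code. The sample values are copied from the record above.

from dataclasses import dataclass

@dataclass
class VmdkAttachInfo:
    backing_ref: str   # managed object id of the volume's shadow VM, e.g. 'vm-353697'
    volume_id: str
    access_mode: str
    encrypted: bool

def parse_vmdk_connection_info(connection_info: dict) -> VmdkAttachInfo:
    """Illustrative parser for the connection_info shape seen in the log."""
    if connection_info.get("driver_volume_type") != "vmdk":
        raise ValueError("not a vmdk connection")
    data = connection_info["data"]
    return VmdkAttachInfo(
        backing_ref=data["volume"],
        volume_id=data["volume_id"],
        access_mode=data.get("access_mode", "rw"),
        encrypted=data.get("encrypted", False),
    )

info = parse_vmdk_connection_info({
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-353697",
        "volume_id": "1c9f0326-748e-4bd5-9616-8444644f2e72",
        "name": "volume-1c9f0326-748e-4bd5-9616-8444644f2e72",
        "access_mode": "rw",
        "encrypted": False,
    },
})
print(info)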
[ 1898.970867] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-689da401-3449-4a64-b1b3-178b36b13e49 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.993164] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1898.993164] env[62820]: value = "task-1696579" [ 1898.993164] env[62820]: _type = "Task" [ 1898.993164] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.002814] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696579, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.106122] env[62820]: DEBUG oslo_vmware.api [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696577, 'name': PowerOffVM_Task, 'duration_secs': 0.211162} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.106122] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.106122] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1899.106122] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e00bf6b-fdd3-47f3-b3f9-88c8d57ce535 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.194900] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696578, 'name': PowerOffVM_Task, 'duration_secs': 0.321488} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.195134] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.195911] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5662dbc-a486-4194-8411-565b1c517169 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.213878] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5220b945-72f6-4e57-a514-4c534c6cb156 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.279481] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1899.279781] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1899.279981] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1899.280262] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ee2ba1b-ed80-4e93-a978-8792fb0faeeb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.286948] env[62820]: DEBUG oslo_vmware.api [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1899.286948] env[62820]: value = "task-1696581" [ 1899.286948] env[62820]: _type = "Task" [ 1899.286948] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.294674] env[62820]: DEBUG oslo_vmware.api [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696581, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.336792] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Releasing lock "refresh_cache-44889009-b397-463f-be67-d67126d3fa5a" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.337229] env[62820]: DEBUG nova.compute.manager [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1899.337437] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1899.338362] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df165c3e-a35d-4af7-9d36-b3abc0464489 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.345654] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1899.345874] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1be66785-aa0e-45ac-a915-bb8787a82191 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.352606] env[62820]: DEBUG oslo_vmware.api [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1899.352606] env[62820]: value = "task-1696582" [ 1899.352606] env[62820]: _type = "Task" [ 1899.352606] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.360186] env[62820]: DEBUG oslo_vmware.api [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696582, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.441646] env[62820]: INFO nova.compute.manager [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Unrescuing [ 1899.442019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.442211] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquired lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1899.442410] env[62820]: DEBUG nova.network.neutron [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1899.488596] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.490670] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.490670] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.503238] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696579, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.513672] env[62820]: INFO nova.scheduler.client.report [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted allocations for instance a8803178-7fa3-42ea-824c-901063673062 [ 1899.723965] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1899.724350] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-29bcb895-5c89-4cf6-b6b5-0fcaaa571870 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.732497] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1899.732497] env[62820]: value = "task-1696583" [ 1899.732497] env[62820]: _type = "Task" [ 1899.732497] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.742935] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696583, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.797259] env[62820]: DEBUG oslo_vmware.api [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249226} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.797519] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1899.797733] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1899.797905] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1899.798120] env[62820]: INFO nova.compute.manager [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1899.798375] env[62820]: DEBUG oslo.service.loopingcall [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1899.798595] env[62820]: DEBUG nova.compute.manager [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1899.798700] env[62820]: DEBUG nova.network.neutron [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1899.862937] env[62820]: DEBUG oslo_vmware.api [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696582, 'name': PowerOffVM_Task, 'duration_secs': 0.100027} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.863224] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.863397] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1899.863644] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3585d26a-8395-40c7-bdd3-f8e6dcfcd054 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.910613] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1899.910945] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1899.911205] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Deleting the datastore file [datastore1] 44889009-b397-463f-be67-d67126d3fa5a {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1899.911534] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b604b38-4112-42e2-afcd-e20c16e5b46b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.918009] env[62820]: DEBUG oslo_vmware.api [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for the task: (returnval){ [ 1899.918009] env[62820]: value = "task-1696585" [ 1899.918009] env[62820]: _type = "Task" [ 1899.918009] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.927103] env[62820]: DEBUG oslo_vmware.api [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.006944] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696579, 'name': ReconfigVM_Task, 'duration_secs': 0.833482} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.007316] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Reconfigured VM instance instance-0000006e to attach disk [datastore1] volume-1c9f0326-748e-4bd5-9616-8444644f2e72/volume-1c9f0326-748e-4bd5-9616-8444644f2e72.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1900.012830] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b7d4874-7628-4e00-bcb9-245881a66bdc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.026433] env[62820]: DEBUG oslo_concurrency.lockutils [None req-282fe86a-79a8-4d85-b116-0f11d3835860 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "a8803178-7fa3-42ea-824c-901063673062" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.029s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.031891] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1900.031891] env[62820]: value = "task-1696586" [ 1900.031891] env[62820]: _type = "Task" [ 1900.031891] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.040755] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696586, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.242397] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696583, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.331605] env[62820]: DEBUG nova.network.neutron [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updating instance_info_cache with network_info: [{"id": "fb03572c-f629-44cb-9538-f1fe53eca171", "address": "fa:16:3e:78:ed:e9", "network": {"id": "ee9f36e0-9941-4d66-882f-71d63807153f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-623108816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e2ccee293cde400f927db43f421cd50d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb03572c-f6", "ovs_interfaceid": "fb03572c-f629-44cb-9538-f1fe53eca171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.429103] env[62820]: DEBUG oslo_vmware.api [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Task: {'id': task-1696585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12799} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.429761] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1900.429914] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1900.430348] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1900.430348] env[62820]: INFO nova.compute.manager [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Took 1.09 seconds to destroy the instance on the hypervisor. 
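The update_instance_cache_with_nw_info record above caches the full Neutron network_info list for instance 70ef320e-16c4-4aa8-8770-4828f71868f5. The sketch below simply walks a structure of that shape and summarizes each port; it is illustrative only, not Nova's network model, and the sample dict is trimmed down from the logged entry.

def summarize_network_info(network_info: list[dict]) -> list[str]:
    """Illustrative walk over a cached network_info list like the one logged above."""
    lines = []
    for vif in network_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        lines.append(
            f"port {vif['id']} mac={vif['address']} "
            f"net={vif['network']['label']} ips={','.join(fixed_ips) or '-'}"
        )
    return lines

sample = [{
    "id": "fb03572c-f629-44cb-9538-f1fe53eca171",
    "address": "fa:16:3e:78:ed:e9",
    "network": {
        "label": "tempest-ServerRescueTestJSON-623108816-network",
        "subnets": [{"ips": [{"address": "192.168.128.2"}]}],
    },
}]
print("\n".join(summarize_network_info(sample)))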
[ 1900.430510] env[62820]: DEBUG oslo.service.loopingcall [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1900.430751] env[62820]: DEBUG nova.compute.manager [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1900.430873] env[62820]: DEBUG nova.network.neutron [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1900.437902] env[62820]: DEBUG nova.compute.manager [req-7dda2b64-bf42-4356-b4a4-75e9255d4afb req-a8470b54-1563-4e54-8d71-174b28df0415 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Received event network-vif-deleted-b4b5b723-be36-401c-8214-964a362697b6 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1900.438115] env[62820]: INFO nova.compute.manager [req-7dda2b64-bf42-4356-b4a4-75e9255d4afb req-a8470b54-1563-4e54-8d71-174b28df0415 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Neutron deleted interface b4b5b723-be36-401c-8214-964a362697b6; detaching it from the instance and deleting it from the info cache [ 1900.438292] env[62820]: DEBUG nova.network.neutron [req-7dda2b64-bf42-4356-b4a4-75e9255d4afb req-a8470b54-1563-4e54-8d71-174b28df0415 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.449746] env[62820]: DEBUG nova.network.neutron [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1900.542615] env[62820]: DEBUG oslo_vmware.api [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696586, 'name': ReconfigVM_Task, 'duration_secs': 0.158219} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.542926] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353697', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'name': 'volume-1c9f0326-748e-4bd5-9616-8444644f2e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '901626d2-1788-4017-b0c7-52537618804c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'serial': '1c9f0326-748e-4bd5-9616-8444644f2e72'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1900.743803] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696583, 'name': CreateSnapshot_Task, 'duration_secs': 0.879307} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.744091] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1900.744834] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6379552-d94a-4f85-bb40-abb5dcb3f637 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.834540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Releasing lock "refresh_cache-70ef320e-16c4-4aa8-8770-4828f71868f5" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.835254] env[62820]: DEBUG nova.objects.instance [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lazy-loading 'flavor' on Instance uuid 70ef320e-16c4-4aa8-8770-4828f71868f5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1900.915610] env[62820]: DEBUG nova.network.neutron [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.943964] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8db05eb-54c2-484e-9958-bcfec03824ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.954343] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482b4896-bd2d-4d09-8212-0ef92ba7c2d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.964943] env[62820]: DEBUG 
nova.network.neutron [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.985595] env[62820]: DEBUG nova.compute.manager [req-7dda2b64-bf42-4356-b4a4-75e9255d4afb req-a8470b54-1563-4e54-8d71-174b28df0415 service nova] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Detach interface failed, port_id=b4b5b723-be36-401c-8214-964a362697b6, reason: Instance 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1901.263703] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1901.263970] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-680cf9e1-a645-406e-b499-29ddcb952ea9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.274229] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1901.274229] env[62820]: value = "task-1696587" [ 1901.274229] env[62820]: _type = "Task" [ 1901.274229] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.283031] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696587, 'name': CloneVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.342078] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c62f4ae-7e84-457a-add2-1c7bfeb893e5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.372700] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1901.373194] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a27a0ae-baff-4b4d-9bcd-3968a400f6c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.381525] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1901.381525] env[62820]: value = "task-1696588" [ 1901.381525] env[62820]: _type = "Task" [ 1901.381525] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.390255] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.419073] env[62820]: INFO nova.compute.manager [-] [instance: 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8] Took 1.62 seconds to deallocate network for instance. [ 1901.468016] env[62820]: INFO nova.compute.manager [-] [instance: 44889009-b397-463f-be67-d67126d3fa5a] Took 1.04 seconds to deallocate network for instance. [ 1901.533358] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.533358] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.578603] env[62820]: DEBUG nova.objects.instance [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1901.784807] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696587, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.891498] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696588, 'name': PowerOffVM_Task, 'duration_secs': 0.179631} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.891767] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1901.897099] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1901.897394] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c928b477-9731-4181-a4f9-ce0ded2043f2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.916268] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1901.916268] env[62820]: value = "task-1696589" [ 1901.916268] env[62820]: _type = "Task" [ 1901.916268] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.924481] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696589, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.925413] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.925648] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.925873] env[62820]: DEBUG nova.objects.instance [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'resources' on Instance uuid 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1901.975791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.036106] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1902.084035] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f430f7e5-c733-46ee-b340-9f8f4b8a65e5 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.825s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.287028] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696587, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.427150] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696589, 'name': ReconfigVM_Task, 'duration_secs': 0.20889} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.430980] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1902.431286] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1902.431798] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec41c9e1-6371-4130-b677-7469c9ea2aef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.439433] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1902.439433] env[62820]: value = "task-1696590" [ 1902.439433] env[62820]: _type = "Task" [ 1902.439433] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.447329] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696590, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.555029] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.564519] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff5e4dc-ffb8-4d51-bb70-b4a4d53b5627 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.571853] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e219e808-b478-403f-b276-d922e5edaecc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.603713] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ced0ec-966c-4ad1-b3f6-70a980acd4f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.611306] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb033e54-6015-4c3a-8e67-bc0662a9366d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.624726] env[62820]: DEBUG nova.compute.provider_tree [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1902.638708] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.638947] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.639127] env[62820]: DEBUG nova.compute.manager [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1902.639985] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdab4b91-547e-467d-aea1-53178364c08a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.646579] env[62820]: DEBUG nova.compute.manager [None 
req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1902.647122] env[62820]: DEBUG nova.objects.instance [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1902.786979] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696587, 'name': CloneVM_Task, 'duration_secs': 1.436061} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.787364] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Created linked-clone VM from snapshot [ 1902.788092] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b4eced-c992-4453-bb63-b94338385d59 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.795633] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Uploading image 8502cdb1-2a78-4742-9247-16bbadfc0ad6 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1902.822823] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1902.822823] env[62820]: value = "vm-353699" [ 1902.822823] env[62820]: _type = "VirtualMachine" [ 1902.822823] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1902.823140] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-db9c5fd1-1fca-47a3-a452-2d7b347eaeef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.831156] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease: (returnval){ [ 1902.831156] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523bed4e-cd3d-3346-d5d2-49adae6ff870" [ 1902.831156] env[62820]: _type = "HttpNfcLease" [ 1902.831156] env[62820]: } obtained for exporting VM: (result){ [ 1902.831156] env[62820]: value = "vm-353699" [ 1902.831156] env[62820]: _type = "VirtualMachine" [ 1902.831156] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1902.831489] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the lease: (returnval){ [ 1902.831489] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523bed4e-cd3d-3346-d5d2-49adae6ff870" [ 1902.831489] env[62820]: _type = "HttpNfcLease" [ 1902.831489] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1902.838039] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1902.838039] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523bed4e-cd3d-3346-d5d2-49adae6ff870" [ 1902.838039] env[62820]: _type = "HttpNfcLease" [ 1902.838039] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1902.949594] env[62820]: DEBUG oslo_vmware.api [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696590, 'name': PowerOnVM_Task, 'duration_secs': 0.489314} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.949594] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1902.949859] env[62820]: DEBUG nova.compute.manager [None req-a7a6e93b-8b10-4194-a3f9-44598230c97d tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1902.950506] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324f7282-96e4-4d51-bdf3-0d999f89a4ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.128073] env[62820]: DEBUG nova.scheduler.client.report [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1903.340605] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1903.340605] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523bed4e-cd3d-3346-d5d2-49adae6ff870" [ 1903.340605] env[62820]: _type = "HttpNfcLease" [ 1903.340605] env[62820]: } is ready. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1903.340901] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1903.340901] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523bed4e-cd3d-3346-d5d2-49adae6ff870" [ 1903.340901] env[62820]: _type = "HttpNfcLease" [ 1903.340901] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1903.341657] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9f56d3-68f1-4149-9ca2-ca56d6fa93e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.350535] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d43a42-6976-623f-761d-437fadfd7fb9/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1903.350781] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d43a42-6976-623f-761d-437fadfd7fb9/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1903.451800] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-340da006-5772-4f32-b011-1f9d8135a6e6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.632994] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.707s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.635680] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.660s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.635680] env[62820]: DEBUG nova.objects.instance [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lazy-loading 'resources' on Instance uuid 44889009-b397-463f-be67-d67126d3fa5a {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1903.653029] env[62820]: INFO nova.scheduler.client.report [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted allocations 
for instance 25c8d7b7-d639-474a-b5cc-c01a6a0a79f8 [ 1903.655759] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1903.659159] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5ba1d20-dc2f-41e0-afbc-6d7d4e17b907 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.668883] env[62820]: DEBUG oslo_vmware.api [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1903.668883] env[62820]: value = "task-1696592" [ 1903.668883] env[62820]: _type = "Task" [ 1903.668883] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.685902] env[62820]: DEBUG oslo_vmware.api [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.164803] env[62820]: DEBUG oslo_concurrency.lockutils [None req-15fdc4cc-4d8a-4c03-85f2-2dd4698cf511 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "25c8d7b7-d639-474a-b5cc-c01a6a0a79f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.096s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.179835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "70ef320e-16c4-4aa8-8770-4828f71868f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.180116] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.180333] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "70ef320e-16c4-4aa8-8770-4828f71868f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.180539] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock 
"70ef320e-16c4-4aa8-8770-4828f71868f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.180740] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.186271] env[62820]: DEBUG oslo_vmware.api [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696592, 'name': PowerOffVM_Task, 'duration_secs': 0.288533} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.188932] env[62820]: INFO nova.compute.manager [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Terminating instance [ 1904.190280] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1904.190478] env[62820]: DEBUG nova.compute.manager [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1904.192614] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd42ee6-37e4-4303-a86f-97d0f84cb303 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.280344] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f965e59-8f22-4e1b-aa05-b1f250b173ff {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.288354] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713eec93-ee6f-47ae-9b40-a6c768e76ab6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.323095] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79c64c9-9259-4cc6-93b5-6c30a11c2392 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.333942] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53325cf-332c-40ce-abaf-901fc1b15303 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.350675] env[62820]: DEBUG nova.compute.provider_tree [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf 
tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.697349] env[62820]: DEBUG nova.compute.manager [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1904.697623] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1904.698851] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89c29a1-9fca-4ea2-9e9a-926340213ea9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.709248] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1904.709855] env[62820]: DEBUG oslo_concurrency.lockutils [None req-84286800-8879-48a1-a93c-f765d678078e tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.071s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.710728] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42c5b31f-68fb-4420-9a88-3d36839d68ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.723429] env[62820]: DEBUG oslo_vmware.api [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1904.723429] env[62820]: value = "task-1696593" [ 1904.723429] env[62820]: _type = "Task" [ 1904.723429] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.735608] env[62820]: DEBUG oslo_vmware.api [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696593, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.855480] env[62820]: DEBUG nova.scheduler.client.report [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1905.156616] env[62820]: DEBUG nova.objects.instance [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.233704] env[62820]: DEBUG oslo_vmware.api [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696593, 'name': PowerOffVM_Task, 'duration_secs': 0.242336} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.234741] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1905.234741] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1905.234926] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48e20740-4dc5-4016-9d4e-e55d9400a719 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.354436] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1905.354731] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1905.355010] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Deleting the datastore file [datastore1] 
70ef320e-16c4-4aa8-8770-4828f71868f5 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1905.355345] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46fa1d74-ea27-4d53-be39-30eb26b430c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.362076] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.364379] env[62820]: DEBUG oslo_vmware.api [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1905.364379] env[62820]: value = "task-1696595" [ 1905.364379] env[62820]: _type = "Task" [ 1905.364379] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.364854] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.810s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.366765] env[62820]: INFO nova.compute.claims [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1905.378198] env[62820]: DEBUG oslo_vmware.api [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696595, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.390044] env[62820]: INFO nova.scheduler.client.report [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Deleted allocations for instance 44889009-b397-463f-be67-d67126d3fa5a [ 1905.662216] env[62820]: DEBUG oslo_concurrency.lockutils [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.662403] env[62820]: DEBUG oslo_concurrency.lockutils [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.662531] env[62820]: DEBUG nova.network.neutron [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1905.662704] env[62820]: DEBUG nova.objects.instance [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'info_cache' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.882064] env[62820]: DEBUG oslo_vmware.api [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17711} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.882341] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1905.882527] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1905.882709] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1905.882886] env[62820]: INFO nova.compute.manager [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1905.883152] env[62820]: DEBUG oslo.service.loopingcall [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1905.883354] env[62820]: DEBUG nova.compute.manager [-] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1905.883449] env[62820]: DEBUG nova.network.neutron [-] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1905.900743] env[62820]: DEBUG oslo_concurrency.lockutils [None req-69a5c1e3-553a-4fac-97b6-a6bd0c2f6caf tempest-ServerShowV257Test-890976395 tempest-ServerShowV257Test-890976395-project-member] Lock "44889009-b397-463f-be67-d67126d3fa5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.195s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.011789] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "7a923678-5eea-4149-9a6d-0594fdb532c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.012081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.166365] env[62820]: DEBUG nova.objects.base [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Object Instance<901626d2-1788-4017-b0c7-52537618804c> lazy-loaded attributes: flavor,info_cache {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1906.238968] env[62820]: DEBUG nova.compute.manager [req-4175af18-9407-4dfc-becb-dae2a6d05898 req-af0ff3dd-4644-4a2d-bb83-24e4063e704d service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Received event network-vif-deleted-fb03572c-f629-44cb-9538-f1fe53eca171 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1906.239356] env[62820]: INFO nova.compute.manager [req-4175af18-9407-4dfc-becb-dae2a6d05898 req-af0ff3dd-4644-4a2d-bb83-24e4063e704d service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Neutron deleted interface fb03572c-f629-44cb-9538-f1fe53eca171; detaching it from the instance and deleting it from the info cache [ 1906.239745] env[62820]: DEBUG nova.network.neutron [req-4175af18-9407-4dfc-becb-dae2a6d05898 req-af0ff3dd-4644-4a2d-bb83-24e4063e704d service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.514324] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Starting instance... 
{{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1906.519098] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb504841-5151-488a-ac56-028c082a8cc5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.528956] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc7ceb3-f8da-4a02-b238-b422f9ba4084 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.563426] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7c113b-5099-441d-a807-fc8f94b175d4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.571702] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5f3fb4-c20c-4b07-a395-cbb9212562af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.585491] env[62820]: DEBUG nova.compute.provider_tree [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1906.709408] env[62820]: DEBUG nova.network.neutron [-] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.745085] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-790c4ece-53b1-4222-9091-e9e734ff473f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.755666] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065ed092-8e6f-4a10-bcaa-0a78803dc2c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.787759] env[62820]: DEBUG nova.compute.manager [req-4175af18-9407-4dfc-becb-dae2a6d05898 req-af0ff3dd-4644-4a2d-bb83-24e4063e704d service nova] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Detach interface failed, port_id=fb03572c-f629-44cb-9538-f1fe53eca171, reason: Instance 70ef320e-16c4-4aa8-8770-4828f71868f5 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1906.907009] env[62820]: DEBUG nova.network.neutron [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [{"id": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "address": "fa:16:3e:96:64:a6", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3537ab9-0a", "ovs_interfaceid": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.039905] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.088772] env[62820]: DEBUG nova.scheduler.client.report [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1907.212091] env[62820]: INFO nova.compute.manager [-] [instance: 70ef320e-16c4-4aa8-8770-4828f71868f5] Took 1.33 seconds to deallocate network for instance. 
[ 1907.409758] env[62820]: DEBUG oslo_concurrency.lockutils [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.594534] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.595186] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1907.598012] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.558s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.599481] env[62820]: INFO nova.compute.claims [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1907.718207] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.104246] env[62820]: DEBUG nova.compute.utils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1908.107798] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1908.108082] env[62820]: DEBUG nova.network.neutron [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1908.155947] env[62820]: DEBUG nova.policy [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd41e844bb294c6ab6e3869af994f60a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fef128f5c704730b335b62f6cce0416', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1908.417177] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1908.417177] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cc4a031-fa6f-4b85-b1ee-e68cf0c0e36d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.425029] env[62820]: DEBUG oslo_vmware.api [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1908.425029] env[62820]: value = "task-1696596" [ 1908.425029] env[62820]: _type = "Task" [ 1908.425029] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.430503] env[62820]: DEBUG nova.network.neutron [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Successfully created port: 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1908.435464] env[62820]: DEBUG oslo_vmware.api [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.609277] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1908.747051] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a105162-4df6-4fe8-884d-990f624d5093 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.755196] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1faa0564-447a-467c-a7c3-22c523d241c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.787521] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2f5f1e-b3b4-4a90-9826-0c24beb2928f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.795555] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d357bfed-f9ba-4c1a-85fc-08aa4dfb4a94 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.809441] env[62820]: DEBUG nova.compute.provider_tree [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1908.937052] env[62820]: DEBUG oslo_vmware.api [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696596, 'name': PowerOnVM_Task, 'duration_secs': 0.437952} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.937377] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1908.937675] env[62820]: DEBUG nova.compute.manager [None req-66d0a298-51fa-4964-b60b-712b491cfe59 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1908.938467] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51363a11-7bd2-4110-a547-761bb13d2f55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.313277] env[62820]: DEBUG nova.scheduler.client.report [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1909.621517] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1909.651070] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1909.651407] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1909.651594] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1909.651930] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1909.652142] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1909.652305] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1909.652662] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1909.652874] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1909.653069] env[62820]: DEBUG 
nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1909.653384] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1909.653599] env[62820]: DEBUG nova.virt.hardware [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1909.654826] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbdcf01-b176-4ba5-ba1d-8510fbf14063 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.665224] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae491e0c-95ac-40bb-bafe-48695f7b18e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.819229] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.819887] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1909.823714] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.105s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.823714] env[62820]: DEBUG nova.objects.instance [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lazy-loading 'resources' on Instance uuid 70ef320e-16c4-4aa8-8770-4828f71868f5 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1910.106445] env[62820]: DEBUG nova.compute.manager [req-a333dbd3-7db1-40d8-8e59-0287dd33a715 req-339ca1c5-c84b-4ed7-b314-e0a8e21a634c service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-vif-plugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1910.106682] env[62820]: DEBUG oslo_concurrency.lockutils [req-a333dbd3-7db1-40d8-8e59-0287dd33a715 req-339ca1c5-c84b-4ed7-b314-e0a8e21a634c service nova] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.106900] env[62820]: DEBUG oslo_concurrency.lockutils [req-a333dbd3-7db1-40d8-8e59-0287dd33a715 req-339ca1c5-c84b-4ed7-b314-e0a8e21a634c service nova] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.107097] env[62820]: DEBUG oslo_concurrency.lockutils [req-a333dbd3-7db1-40d8-8e59-0287dd33a715 req-339ca1c5-c84b-4ed7-b314-e0a8e21a634c service nova] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.107257] env[62820]: DEBUG nova.compute.manager [req-a333dbd3-7db1-40d8-8e59-0287dd33a715 req-339ca1c5-c84b-4ed7-b314-e0a8e21a634c service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] No waiting events found dispatching network-vif-plugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1910.107425] env[62820]: WARNING nova.compute.manager [req-a333dbd3-7db1-40d8-8e59-0287dd33a715 req-339ca1c5-c84b-4ed7-b314-e0a8e21a634c service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received unexpected event network-vif-plugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 for instance with vm_state building and task_state spawning. 
[ 1910.199241] env[62820]: DEBUG nova.network.neutron [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Successfully updated port: 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1910.327015] env[62820]: DEBUG nova.compute.utils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1910.332618] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1910.332934] env[62820]: DEBUG nova.network.neutron [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1910.377724] env[62820]: DEBUG nova.policy [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815f8967d40e4943a66da6866de8b018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14768f5b38ea4f6abf5583ce5e4409f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1910.467445] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b60b36-21aa-42a6-bb4b-c0f439efebc3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.477030] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dedbe2-869c-4bfa-a098-5fa5aaa56481 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.512934] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3865b20-6bb9-4bbe-a0d3-e68837e8ff74 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.520408] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b89d7c6-8695-4307-ac08-1c88ebfbdef7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.535110] env[62820]: DEBUG nova.compute.provider_tree [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed in ProviderTree for provider: 
8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1910.662831] env[62820]: DEBUG nova.network.neutron [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Successfully created port: 56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1910.701078] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.701263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.701423] env[62820]: DEBUG nova.network.neutron [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1910.747610] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d43a42-6976-623f-761d-437fadfd7fb9/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1910.748634] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1944288c-5fad-463b-82ad-3ba813aea0fd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.755207] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d43a42-6976-623f-761d-437fadfd7fb9/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1910.755422] env[62820]: ERROR oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d43a42-6976-623f-761d-437fadfd7fb9/disk-0.vmdk due to incomplete transfer. 
[ 1910.757076] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-03ff7f82-6cf2-4f5e-82a0-531a9ec89b4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.762878] env[62820]: DEBUG oslo_vmware.rw_handles [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d43a42-6976-623f-761d-437fadfd7fb9/disk-0.vmdk. {{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1910.763129] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Uploaded image 8502cdb1-2a78-4742-9247-16bbadfc0ad6 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1910.765559] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1910.765832] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-24e8f506-82c1-4667-bd0f-e42f1f5f3bb9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.771832] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1910.771832] env[62820]: value = "task-1696597" [ 1910.771832] env[62820]: _type = "Task" [ 1910.771832] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.781248] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696597, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.833248] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1910.882167] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.882406] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.987525] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.987821] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.987977] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1911.039036] env[62820]: DEBUG nova.scheduler.client.report [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1911.239253] env[62820]: DEBUG nova.network.neutron [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1911.283908] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696597, 'name': Destroy_Task, 'duration_secs': 0.440282} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.283908] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Destroyed the VM [ 1911.284115] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1911.284419] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-13fd3817-38f1-47f2-8021-aa5dcbcf73d8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.291159] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1911.291159] env[62820]: value = "task-1696598" [ 1911.291159] env[62820]: _type = "Task" [ 1911.291159] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.299484] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696598, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.382646] env[62820]: DEBUG nova.network.neutron [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.386669] env[62820]: DEBUG nova.compute.utils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1911.545734] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.722s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.567881] env[62820]: INFO nova.scheduler.client.report [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Deleted allocations for instance 70ef320e-16c4-4aa8-8770-4828f71868f5 [ 1911.582969] env[62820]: DEBUG nova.compute.manager [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Stashing vm_state: active {{(pid=62820) _prep_resize /opt/stack/nova/nova/compute/manager.py:5968}} [ 1911.801189] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696598, 'name': RemoveSnapshot_Task, 'duration_secs': 0.37669} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.801559] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1911.801755] env[62820]: DEBUG nova.compute.manager [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1911.802538] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38263674-5dfd-491c-b7bf-b7e91567582c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.846220] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1911.873495] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1911.873747] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1911.873909] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1911.874146] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1911.874325] env[62820]: DEBUG nova.virt.hardware [None 
req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1911.874478] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1911.874682] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1911.874870] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1911.875016] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1911.875235] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1911.875409] env[62820]: DEBUG nova.virt.hardware [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1911.876309] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7f1555-9702-47fa-aff4-06e3e169f1ad {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.884677] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5dce755-6895-452a-b8f0-08f1aa831701 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.888897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.889218] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 
72cdf2b2-fb69-4820-a663-56bfe92572d2] Instance network_info: |[{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1911.889763] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.890399] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:49:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1911.898191] env[62820]: DEBUG oslo.service.loopingcall [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.898819] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1911.899757] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8907323-c71a-4d85-9030-dee06019f301 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.929881] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1911.929881] env[62820]: value = "task-1696599" [ 1911.929881] env[62820]: _type = "Task" [ 1911.929881] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.937749] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696599, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.075402] env[62820]: DEBUG oslo_concurrency.lockutils [None req-20657775-b9cc-44c6-b05c-df39ec08677f tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "70ef320e-16c4-4aa8-8770-4828f71868f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.895s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.103459] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.103725] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.204222] env[62820]: DEBUG nova.compute.manager [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1912.204457] env[62820]: DEBUG nova.compute.manager [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing instance network info cache due to event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1912.204650] env[62820]: DEBUG oslo_concurrency.lockutils [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.204797] env[62820]: DEBUG oslo_concurrency.lockutils [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.204958] env[62820]: DEBUG nova.network.neutron [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1912.314439] env[62820]: INFO nova.compute.manager [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Shelve offloading [ 1912.322436] env[62820]: DEBUG nova.network.neutron [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Successfully updated port: 56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1912.439663] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696599, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.526491] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.526649] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.526797] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1912.608615] env[62820]: INFO nova.compute.claims [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1912.818614] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1912.818877] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5131a29d-cadd-44c4-bf56-2caee45a1c4f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.825017] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.825017] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.825166] env[62820]: DEBUG nova.network.neutron [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.827138] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1912.827138] env[62820]: value = "task-1696600" [ 1912.827138] env[62820]: _type = "Task" [ 1912.827138] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.836107] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1912.836265] env[62820]: DEBUG nova.compute.manager [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1912.839020] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1b161a-25cf-438b-9070-9d8fe18d3b3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.845222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.845385] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.845576] env[62820]: DEBUG nova.network.neutron [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.901691] env[62820]: DEBUG nova.network.neutron [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updated VIF entry in instance network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1912.902083] env[62820]: DEBUG nova.network.neutron [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.940200] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696599, 'name': CreateVM_Task, 'duration_secs': 0.791248} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.940352] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1912.946983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.947167] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.947484] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1912.948015] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b948ef9f-a17e-4249-b31c-e3700a24a968 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.952772] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1912.952772] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f5eabc-04b8-e096-09cd-82caede3ddba" [ 1912.952772] env[62820]: _type = "Task" [ 1912.952772] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.956307] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.956526] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.956747] env[62820]: INFO nova.compute.manager [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Attaching volume 86af3623-ba32-4685-98de-cf2fde2698cd to /dev/sdb [ 1912.961266] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f5eabc-04b8-e096-09cd-82caede3ddba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.991456] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5518e706-9644-4f46-ab87-02c034095d2b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.998144] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6507b22-7f6e-4657-8057-1c615f49c017 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.002298] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.002515] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.002717] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "c15bbb69-84a0-4fda-a509-66218b9c9f70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.002896] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.003073] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.005056] env[62820]: INFO nova.compute.manager [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Terminating instance [ 1913.011980] env[62820]: DEBUG nova.virt.block_device [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updating existing volume attachment record: d3b83204-2ece-4dd6-af5e-398a3711a1cc {{(pid=62820) 
_volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1913.115173] env[62820]: INFO nova.compute.resource_tracker [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating resource usage from migration 94dd1254-4c4e-4010-a069-eefe1dc83c3e [ 1913.235514] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760f67b7-53d5-4d64-8cfc-32b8757639e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.243287] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc182d92-b8e0-44d4-a628-96f759b3e255 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.274127] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7831eb-2a2f-44f2-896e-b3d4ace3d3af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.281828] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce89b06-e91b-4a3d-9d62-b24555f7abed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.296099] env[62820]: DEBUG nova.compute.provider_tree [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.362318] env[62820]: DEBUG nova.network.neutron [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.404050] env[62820]: DEBUG oslo_concurrency.lockutils [req-d163c0dd-0a06-4a13-baf0-efcd61643513 req-35dd8a63-8fbe-4af0-9782-cf67c5b0d470 service nova] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.468805] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f5eabc-04b8-e096-09cd-82caede3ddba, 'name': SearchDatastore_Task, 'duration_secs': 0.02265} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.469262] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.469514] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1913.469854] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.470124] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.470374] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1913.470713] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7502abe-853f-4229-9892-3c95e741eb2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.479927] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1913.480147] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1913.480862] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1e2ccb5-396e-4bfe-a88f-919d1f05d787 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.486387] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1913.486387] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52f30e23-f288-f04c-5966-90e325331a41" [ 1913.486387] env[62820]: _type = "Task" [ 1913.486387] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.499847] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52f30e23-f288-f04c-5966-90e325331a41, 'name': SearchDatastore_Task, 'duration_secs': 0.008425} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.503748] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de2bea48-4ca1-4414-a216-825b7eb884d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.509704] env[62820]: DEBUG nova.compute.manager [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1913.509960] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1913.510290] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1913.510290] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523916a6-4879-1cb9-3ae9-d948300f9943" [ 1913.510290] env[62820]: _type = "Task" [ 1913.510290] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.510982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564f01d6-14e6-428e-80d9-746a214085dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.523431] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1913.526619] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbd2a856-5950-45ff-8ac8-ae480f06ba17 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.527996] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523916a6-4879-1cb9-3ae9-d948300f9943, 'name': SearchDatastore_Task, 'duration_secs': 0.009191} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.528249] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.528489] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1913.528967] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf913078-5fc1-45be-b49d-9206dcdd68b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.533520] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1913.533520] env[62820]: value = "task-1696602" [ 1913.533520] env[62820]: _type = "Task" [ 1913.533520] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.537056] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1913.537056] env[62820]: value = "task-1696603" [ 1913.537056] env[62820]: _type = "Task" [ 1913.537056] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.542977] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696602, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.547383] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.610286] env[62820]: DEBUG nova.network.neutron [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updating instance_info_cache with network_info: [{"id": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "address": "fa:16:3e:82:8a:1e", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56792423-7b", "ovs_interfaceid": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.799281] env[62820]: DEBUG nova.scheduler.client.report [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1913.850556] env[62820]: DEBUG nova.network.neutron [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": 
{"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.957317] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.047719] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696602, 'name': PowerOffVM_Task, 'duration_secs': 0.205009} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.050856] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1914.050975] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1914.051248] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473572} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.051440] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b22c2eda-668d-450b-a703-a6c56980c85b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.052811] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1914.053022] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1914.053247] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-078394f4-3801-4307-a80b-14cbb6e43091 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.059875] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1914.059875] env[62820]: value = "task-1696605" [ 1914.059875] env[62820]: _type = "Task" [ 1914.059875] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.068935] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696605, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.113211] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.113667] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Instance network_info: |[{"id": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "address": "fa:16:3e:82:8a:1e", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56792423-7b", "ovs_interfaceid": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1914.114191] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:8a:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56792423-7b5c-472d-8d0c-85c04d5dfe61', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1914.122067] env[62820]: DEBUG oslo.service.loopingcall [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1914.122362] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1914.122682] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5ebf431-3b02-44a4-8a26-d8e61108bbd6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.143569] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1914.143569] env[62820]: value = "task-1696606" [ 1914.143569] env[62820]: _type = "Task" [ 1914.143569] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.151198] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696606, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.190283] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1914.190501] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1914.190689] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Deleting the datastore file [datastore1] c15bbb69-84a0-4fda-a509-66218b9c9f70 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1914.190964] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f66d42b-8ab3-402b-ac22-d664f24434b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.197268] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for the task: (returnval){ [ 1914.197268] env[62820]: value = "task-1696607" [ 1914.197268] env[62820]: _type = "Task" [ 1914.197268] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.205565] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696607, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.233538] env[62820]: DEBUG nova.compute.manager [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Received event network-vif-plugged-56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1914.233773] env[62820]: DEBUG oslo_concurrency.lockutils [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] Acquiring lock "7a923678-5eea-4149-9a6d-0594fdb532c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.234029] env[62820]: DEBUG oslo_concurrency.lockutils [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.234390] env[62820]: DEBUG oslo_concurrency.lockutils [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.234686] env[62820]: DEBUG nova.compute.manager [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] No waiting events found dispatching network-vif-plugged-56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1914.235011] env[62820]: WARNING nova.compute.manager [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Received unexpected event network-vif-plugged-56792423-7b5c-472d-8d0c-85c04d5dfe61 for instance with vm_state building and task_state spawning. [ 1914.235279] env[62820]: DEBUG nova.compute.manager [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Received event network-changed-56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1914.235509] env[62820]: DEBUG nova.compute.manager [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Refreshing instance network info cache due to event network-changed-56792423-7b5c-472d-8d0c-85c04d5dfe61. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1914.235798] env[62820]: DEBUG oslo_concurrency.lockutils [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] Acquiring lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.235968] env[62820]: DEBUG oslo_concurrency.lockutils [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] Acquired lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.236225] env[62820]: DEBUG nova.network.neutron [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Refreshing network info cache for port 56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1914.304735] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.201s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.304950] env[62820]: INFO nova.compute.manager [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Migrating [ 1914.353313] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.460414] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.460655] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 1914.460950] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.461247] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.461418] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.461573] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.461767] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.461950] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.571886] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213251} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.572197] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1914.572959] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82df3af4-ef51-4156-b668-b9dbb68e0095 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.594995] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1914.594995] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53f046c1-6771-4d36-840e-30ddb71d7039 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.614859] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1914.614859] env[62820]: value = "task-1696608" [ 1914.614859] env[62820]: _type = "Task" [ 1914.614859] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.623673] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696608, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.652436] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696606, 'name': CreateVM_Task} progress is 25%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.663432] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1914.664300] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d46bc6-249a-4307-893c-e0026bb22e37 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.671764] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1914.671998] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93de0ec4-3f48-4095-bb88-76880c6bf230 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.707763] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696607, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.781611] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1914.782086] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1914.782086] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleting the datastore file [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1914.782286] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f70e2c4a-b441-4577-8745-1252d6a9b6f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.789605] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1914.789605] env[62820]: value = "task-1696610" [ 1914.789605] env[62820]: _type = "Task" [ 1914.789605] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.796961] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696610, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.819396] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.819571] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.819836] env[62820]: DEBUG nova.network.neutron [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.966887] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Getting list of instances from cluster (obj){ [ 1914.966887] env[62820]: value = "domain-c8" [ 1914.966887] env[62820]: _type = "ClusterComputeResource" [ 1914.966887] env[62820]: } {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1914.968510] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f23f484-c9dc-4786-9978-8a9cdacaaac3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.983568] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Got total of 6 instances {{(pid=62820) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1914.983725] env[62820]: WARNING nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] While synchronizing instance power states, found 8 instances in the database and 6 instances on the hypervisor. 
[ 1914.983861] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 15b6eda1-db87-45d1-a0c6-320386b02e12 {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.984087] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.984214] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 2aeeb809-0b27-411b-b632-ef4d61b295df {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.984362] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid c15bbb69-84a0-4fda-a509-66218b9c9f70 {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.984505] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 41666e62-526d-4553-a005-07cbc2321d0d {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.984649] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 4d69baaa-83da-4c5f-b88f-928693505520 {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.985664] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.985664] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Triggering sync for uuid 7a923678-5eea-4149-9a6d-0594fdb532c8 {{(pid=62820) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10696}} [ 1914.986070] env[62820]: DEBUG nova.network.neutron [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updated VIF entry in instance network info cache for port 56792423-7b5c-472d-8d0c-85c04d5dfe61. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1914.987142] env[62820]: DEBUG nova.network.neutron [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updating instance_info_cache with network_info: [{"id": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "address": "fa:16:3e:82:8a:1e", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56792423-7b", "ovs_interfaceid": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.987444] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "15b6eda1-db87-45d1-a0c6-320386b02e12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.987677] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.987966] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.988169] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.988426] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1914.988639] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.988840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "41666e62-526d-4553-a005-07cbc2321d0d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.989032] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "41666e62-526d-4553-a005-07cbc2321d0d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.989199] env[62820]: INFO nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] During sync_power_state the instance has a pending task (resize_prep). Skip. [ 1914.989367] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "41666e62-526d-4553-a005-07cbc2321d0d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.989548] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.989805] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.990029] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "7a923678-5eea-4149-9a6d-0594fdb532c8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.990214] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.990346] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1914.991283] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afb7b70-df0b-404b-bd4f-c71261b83f7c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.994482] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844f483d-de00-405e-a85b-61a7f6deb276 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.996939] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.125175] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696608, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.152868] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696606, 'name': CreateVM_Task, 'duration_secs': 0.943497} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.153039] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1915.153771] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.153957] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.154291] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1915.154566] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88590f8b-697d-4b11-aa55-ab21983336e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.159077] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1915.159077] env[62820]: value = 
"session[5263da33-e147-45e9-71e6-fd449b37f057]52a855f0-499d-38b3-add7-05f197b59124" [ 1915.159077] env[62820]: _type = "Task" [ 1915.159077] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.166965] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a855f0-499d-38b3-add7-05f197b59124, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.210657] env[62820]: DEBUG oslo_vmware.api [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Task: {'id': task-1696607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.830479} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.210657] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1915.210757] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1915.212054] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1915.212054] env[62820]: INFO nova.compute.manager [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1915.212054] env[62820]: DEBUG oslo.service.loopingcall [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1915.212054] env[62820]: DEBUG nova.compute.manager [-] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1915.212054] env[62820]: DEBUG nova.network.neutron [-] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1915.304029] env[62820]: DEBUG oslo_vmware.api [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402848} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.304029] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1915.304029] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1915.304029] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1915.337569] env[62820]: INFO nova.scheduler.client.report [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted allocations for instance 2aeeb809-0b27-411b-b632-ef4d61b295df [ 1915.491905] env[62820]: DEBUG oslo_concurrency.lockutils [req-905f5b70-1efb-4688-a60a-374b2cb5fbe3 req-16e6d834-fa3e-4628-939b-def0e98186da service nova] Releasing lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.500909] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.501131] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.501297] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.501448] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1915.502317] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee02b87-49c2-491e-a019-a1b6b01c7689 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.509825] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.511666] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a56fa5-7649-4348-a952-dfa051eb218c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.516421] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.528s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.528110] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2085a0-8b7e-4cc6-96f1-3fcf48b4f88e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.534558] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3db6f1d-d58d-41c5-9e15-99f6ca0d8a66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.566261] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179610MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1915.566425] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.566632] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.625345] env[62820]: DEBUG oslo_vmware.api [None 
req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696608, 'name': ReconfigVM_Task, 'duration_secs': 0.726247} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.625607] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1915.626232] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcce4675-0313-433c-b75e-b81eb2c4807b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.632623] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1915.632623] env[62820]: value = "task-1696612" [ 1915.632623] env[62820]: _type = "Task" [ 1915.632623] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.640234] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696612, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.668532] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52a855f0-499d-38b3-add7-05f197b59124, 'name': SearchDatastore_Task, 'duration_secs': 0.010779} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.668812] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.669050] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1915.670301] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.670301] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.670301] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1915.670301] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e69414f-daff-4a27-b669-cdd0b3b50e8f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.677925] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1915.678131] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1915.678835] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3acb3214-fde3-4d73-a967-8696756ee0c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.682963] env[62820]: DEBUG nova.network.neutron [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.685179] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1915.685179] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52b395e9-9f8f-1dcf-01c3-79572af5a08d" [ 1915.685179] env[62820]: _type = "Task" [ 1915.685179] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.694705] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b395e9-9f8f-1dcf-01c3-79572af5a08d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.843788] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.098976] env[62820]: DEBUG nova.network.neutron [-] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.142160] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696612, 'name': Rename_Task, 'duration_secs': 0.161074} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.142738] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1916.143008] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d6a071f-ad2d-4ad3-8c9e-13bb490931e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.149208] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1916.149208] env[62820]: value = "task-1696613" [ 1916.149208] env[62820]: _type = "Task" [ 1916.149208] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.158355] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.191319] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.196253] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52b395e9-9f8f-1dcf-01c3-79572af5a08d, 'name': SearchDatastore_Task, 'duration_secs': 0.009084} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.197460] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1abb5a47-da9c-4276-aefe-f42556e7cd07 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.202398] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1916.202398] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52959ae7-a317-bb00-bb00-f83927f7268f" [ 1916.202398] env[62820]: _type = "Task" [ 1916.202398] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.211482] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52959ae7-a317-bb00-bb00-f83927f7268f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.276062] env[62820]: DEBUG nova.compute.manager [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-vif-unplugged-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1916.276357] env[62820]: DEBUG oslo_concurrency.lockutils [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.276628] env[62820]: DEBUG oslo_concurrency.lockutils [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.276968] env[62820]: DEBUG oslo_concurrency.lockutils [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.277216] env[62820]: DEBUG nova.compute.manager [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] No waiting events found dispatching network-vif-unplugged-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1916.277441] env[62820]: WARNING nova.compute.manager [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received unexpected event 
network-vif-unplugged-7f7affc8-f587-4484-9eef-211d6ea80226 for instance with vm_state shelved_offloaded and task_state None. [ 1916.277683] env[62820]: DEBUG nova.compute.manager [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1916.277876] env[62820]: DEBUG nova.compute.manager [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing instance network info cache due to event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1916.278115] env[62820]: DEBUG oslo_concurrency.lockutils [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.278306] env[62820]: DEBUG oslo_concurrency.lockutils [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.278528] env[62820]: DEBUG nova.network.neutron [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1916.579501] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Applying migration context for instance 41666e62-526d-4553-a005-07cbc2321d0d as it has an incoming, in-progress migration 94dd1254-4c4e-4010-a069-eefe1dc83c3e. Migration status is pre-migrating {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1916.580174] env[62820]: INFO nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating resource usage from migration 94dd1254-4c4e-4010-a069-eefe1dc83c3e [ 1916.601385] env[62820]: INFO nova.compute.manager [-] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Took 1.39 seconds to deallocate network for instance. [ 1916.604239] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.604384] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 901626d2-1788-4017-b0c7-52537618804c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.604571] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance c15bbb69-84a0-4fda-a509-66218b9c9f70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.604727] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 4d69baaa-83da-4c5f-b88f-928693505520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.604847] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 72cdf2b2-fb69-4820-a663-56bfe92572d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.604971] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7a923678-5eea-4149-9a6d-0594fdb532c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.605110] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Migration 94dd1254-4c4e-4010-a069-eefe1dc83c3e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1916.605213] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 41666e62-526d-4553-a005-07cbc2321d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1916.605401] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1916.605612] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1916.660679] env[62820]: DEBUG oslo_vmware.api [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696613, 'name': PowerOnVM_Task, 'duration_secs': 0.448622} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.663010] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1916.663232] env[62820]: INFO nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Took 7.04 seconds to spawn the instance on the hypervisor. [ 1916.663413] env[62820]: DEBUG nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1916.664322] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3872235a-1b5c-4fe9-9f48-c514d951ee00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.713724] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52959ae7-a317-bb00-bb00-f83927f7268f, 'name': SearchDatastore_Task, 'duration_secs': 0.009131} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.716466] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.716729] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7a923678-5eea-4149-9a6d-0594fdb532c8/7a923678-5eea-4149-9a6d-0594fdb532c8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1916.717412] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c69e6d4-f8e0-405c-99a0-be1a0dd0f926 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.723969] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1916.723969] env[62820]: value = "task-1696614" [ 1916.723969] env[62820]: _type = "Task" [ 1916.723969] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.728710] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e19c348-19e5-4470-b360-1e656b5e29d6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.739157] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaeb4da-4f4e-4f74-8dc5-208b33af385a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.742845] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696614, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.771764] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a669a8-37fa-48eb-865c-6cd4354fc2da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.779307] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0164b96a-7b1d-4721-864c-a8c783973f1b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.795967] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.024506] env[62820]: DEBUG nova.network.neutron [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updated VIF entry in instance network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1917.024847] env[62820]: DEBUG nova.network.neutron [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.116246] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.182769] env[62820]: INFO nova.compute.manager [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Took 14.64 seconds to build instance. [ 1917.234569] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696614, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464864} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.234831] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 7a923678-5eea-4149-9a6d-0594fdb532c8/7a923678-5eea-4149-9a6d-0594fdb532c8.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1917.235063] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1917.235324] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ceb985f6-e92a-433a-9df8-fd5a904e737a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.242379] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1917.242379] env[62820]: value = "task-1696615" [ 1917.242379] env[62820]: _type = "Task" [ 1917.242379] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.250947] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696615, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.299044] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1917.528649] env[62820]: DEBUG oslo_concurrency.lockutils [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.528649] env[62820]: DEBUG nova.compute.manager [req-482903ab-89a4-4ccb-b902-e40cd6822afa req-40cb254b-40b3-4ff0-8f97-014d3e4b6698 service nova] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] Received event network-vif-deleted-ad74c59c-92d9-43b7-8a73-b480a40ae561 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1917.555709] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Volume attach. Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1917.555962] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353701', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'name': 'volume-86af3623-ba32-4685-98de-cf2fde2698cd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d69baaa-83da-4c5f-b88f-928693505520', 'attached_at': '', 'detached_at': '', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'serial': '86af3623-ba32-4685-98de-cf2fde2698cd'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1917.556917] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e20677d-69e1-4fcb-9b5b-0c06940d017a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.574154] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422da2c9-510f-4204-b067-5534cefcbeab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.601302] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Reconfiguring VM instance instance-00000073 to 
attach disk [datastore1] volume-86af3623-ba32-4685-98de-cf2fde2698cd/volume-86af3623-ba32-4685-98de-cf2fde2698cd.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.601626] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8781a88d-b00b-4cb2-a67a-451815e0c16a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.620628] env[62820]: DEBUG oslo_vmware.api [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1917.620628] env[62820]: value = "task-1696616" [ 1917.620628] env[62820]: _type = "Task" [ 1917.620628] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.628846] env[62820]: DEBUG oslo_vmware.api [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696616, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.664695] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.684252] env[62820]: DEBUG oslo_concurrency.lockutils [None req-f7095471-f91a-4cca-bf6c-cc2885d6bff8 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.151s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.684574] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.695s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.684800] env[62820]: INFO nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1917.684985] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.708837] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a21247-88fb-416a-b2e6-8bddfa01e2a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.728477] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1917.752037] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696615, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065639} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.752037] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1917.752668] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa1c94c-f76a-4ae9-9ba8-263e5fce9e16 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.775239] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 7a923678-5eea-4149-9a6d-0594fdb532c8/7a923678-5eea-4149-9a6d-0594fdb532c8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.776354] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96b9d77a-63ad-4168-a2bd-4e066603d54c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.796980] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1917.796980] env[62820]: value = "task-1696617" [ 1917.796980] env[62820]: _type = "Task" [ 1917.796980] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.805820] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1917.806049] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.239s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.806225] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696617, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.806484] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.963s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.806664] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.809541] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.693s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.809899] env[62820]: DEBUG nova.objects.instance [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lazy-loading 'resources' on Instance uuid c15bbb69-84a0-4fda-a509-66218b9c9f70 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1918.133977] env[62820]: DEBUG oslo_vmware.api [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696616, 'name': ReconfigVM_Task, 'duration_secs': 0.371112} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.134408] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-86af3623-ba32-4685-98de-cf2fde2698cd/volume-86af3623-ba32-4685-98de-cf2fde2698cd.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1918.141829] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-458f84fe-8904-432b-9df4-46d00dfdac76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.163509] env[62820]: DEBUG oslo_vmware.api [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1918.163509] env[62820]: value = "task-1696618" [ 1918.163509] env[62820]: _type = "Task" [ 1918.163509] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.171661] env[62820]: DEBUG oslo_vmware.api [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696618, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.234688] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1918.235040] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24ed53af-b2d6-463d-9523-dba86af05282 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.244966] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1918.244966] env[62820]: value = "task-1696619" [ 1918.244966] env[62820]: _type = "Task" [ 1918.244966] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.256448] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696619, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.305716] env[62820]: DEBUG nova.compute.manager [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1918.305716] env[62820]: DEBUG nova.compute.manager [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing instance network info cache due to event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1918.305716] env[62820]: DEBUG oslo_concurrency.lockutils [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.305716] env[62820]: DEBUG oslo_concurrency.lockutils [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.305917] env[62820]: DEBUG nova.network.neutron [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1918.317078] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696617, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.322737] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c026ff27-899d-42f0-88c0-9f69fe8acee2 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.662s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.323758] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.335s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.323924] env[62820]: INFO nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] During sync_power_state the instance has a pending task (shelving_offloading). Skip. 
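The "Acquiring lock" / "acquired ... waited Xs" / "released ... held Ys" lines above come from oslo.concurrency's lockutils primitives. A minimal sketch of that usage pattern; the lock names and function bodies are illustrative only, not Nova's actual call sites:

```python
from oslo_concurrency import lockutils

# Decorator form: serialises callers on a named lock, as the resource tracker
# does for "compute_resources" in the log above.
@lockutils.synchronized('compute_resources')
def update_usage():
    # critical section: only one thread updates the tracker's view at a time
    pass

# Context-manager form: held briefly per instance, as in
# _sync_power_states ... query_driver_power_state_and_sync above.
def query_power_state(instance_uuid):
    with lockutils.lock(instance_uuid):
        # inspect one instance's power state while holding its lock
        pass

update_usage()
query_power_state('72cdf2b2-fb69-4820-a663-56bfe92572d2')
```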
[ 1918.324117] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.324779] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.660s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.324985] env[62820]: INFO nova.compute.manager [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Unshelving [ 1918.468661] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e232b1-99f3-476e-98cf-ddf3c8cd83c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.476234] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de578782-d686-42d6-8549-c01829504981 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.505849] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16efa31-3f58-4649-b548-db20c64b0773 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.513279] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61665dfd-e151-4c7e-984f-9c58383ed0fb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.526326] env[62820]: DEBUG nova.compute.provider_tree [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.673897] env[62820]: DEBUG oslo_vmware.api [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696618, 'name': ReconfigVM_Task, 'duration_secs': 0.14203} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.674223] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353701', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'name': 'volume-86af3623-ba32-4685-98de-cf2fde2698cd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d69baaa-83da-4c5f-b88f-928693505520', 'attached_at': '', 'detached_at': '', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'serial': '86af3623-ba32-4685-98de-cf2fde2698cd'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1918.755270] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696619, 'name': PowerOffVM_Task, 'duration_secs': 0.172906} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.755603] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1918.755883] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1918.812836] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696617, 'name': ReconfigVM_Task, 'duration_secs': 0.564987} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.813089] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 7a923678-5eea-4149-9a6d-0594fdb532c8/7a923678-5eea-4149-9a6d-0594fdb532c8.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1918.814022] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d027ec4-feae-4007-bcd6-023acbe38e48 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.820483] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1918.820483] env[62820]: value = "task-1696620" [ 1918.820483] env[62820]: _type = "Task" [ 1918.820483] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.830258] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696620, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.029582] env[62820]: DEBUG nova.scheduler.client.report [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1919.042104] env[62820]: DEBUG nova.network.neutron [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updated VIF entry in instance network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1919.042520] env[62820]: DEBUG nova.network.neutron [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.262507] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1919.262802] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1919.262924] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1919.263132] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1919.263349] env[62820]: DEBUG 
nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1919.263538] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1919.263761] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1919.263952] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1919.264146] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1919.264336] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1919.264528] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1919.269974] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aa0314e-0c39-4472-b190-5c0d0b85d4d3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.287731] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1919.287731] env[62820]: value = "task-1696621" [ 1919.287731] env[62820]: _type = "Task" [ 1919.287731] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.296175] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696621, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.330503] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696620, 'name': Rename_Task, 'duration_secs': 0.140659} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.330829] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1919.331160] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90e7edab-816d-4f4f-b8e9-e2ddd5c3e4a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.338674] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1919.338674] env[62820]: value = "task-1696622" [ 1919.338674] env[62820]: _type = "Task" [ 1919.338674] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.347195] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.353609] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.534485] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.536836] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.183s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.537083] env[62820]: DEBUG nova.objects.instance [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'pci_requests' on Instance uuid 2aeeb809-0b27-411b-b632-ef4d61b295df {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.544963] env[62820]: DEBUG oslo_concurrency.lockutils [req-f334cdf6-9317-4283-84c4-aba7b547eb3c req-f5d80d30-14be-4671-beff-cf698df79dc4 service nova] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.552839] env[62820]: INFO nova.scheduler.client.report [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Deleted allocations for instance c15bbb69-84a0-4fda-a509-66218b9c9f70 [ 1919.712493] env[62820]: DEBUG nova.objects.instance [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'flavor' on Instance uuid 4d69baaa-83da-4c5f-b88f-928693505520 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.800322] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696621, 'name': ReconfigVM_Task, 'duration_secs': 0.174284} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.800819] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1919.849641] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696622, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.040998] env[62820]: DEBUG nova.objects.instance [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'numa_topology' on Instance uuid 2aeeb809-0b27-411b-b632-ef4d61b295df {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1920.059061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-4d483267-f867-4df6-a907-a33d1f3b2da1 tempest-ServerRescueTestJSON-512574843 tempest-ServerRescueTestJSON-512574843-project-member] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.056s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.059882] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.071s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.060086] env[62820]: INFO nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: c15bbb69-84a0-4fda-a509-66218b9c9f70] During sync_power_state the instance has a pending task (deleting). Skip. 
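The inventory data reported above for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a can be read as consumable capacity per resource class. A worked example, assuming the usual (total - reserved) * allocation_ratio interpretation and ignoring min_unit, max_unit and step_size:

```python
# Inventory values copied from the "Inventory has not changed" record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} consumable")
# VCPU: 192 consumable, MEMORY_MB: 196078 consumable, DISK_GB: 400 consumable
```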
[ 1920.060260] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "c15bbb69-84a0-4fda-a509-66218b9c9f70" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.216916] env[62820]: DEBUG oslo_concurrency.lockutils [None req-aa7d2c3f-e176-4f8a-aac5-9aebdf22d117 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.260s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.217755] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "4d69baaa-83da-4c5f-b88f-928693505520" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.228s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.218761] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6581ca9-18c0-4ae6-9bd7-646478f5e190 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.310725] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1920.311155] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1920.311202] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1920.311430] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1920.311616] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 
{{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1920.311899] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1920.312192] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1920.312377] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1920.312588] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1920.312804] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1920.313030] env[62820]: DEBUG nova.virt.hardware [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1920.318453] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1920.318738] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0aafa749-1b56-4c07-a3ef-4ece9356ab38 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.336846] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1920.336846] env[62820]: value = "task-1696623" [ 1920.336846] env[62820]: _type = "Task" [ 1920.336846] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.347146] env[62820]: DEBUG oslo_vmware.api [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696622, 'name': PowerOnVM_Task, 'duration_secs': 0.520893} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.350079] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1920.350283] env[62820]: INFO nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Took 8.50 seconds to spawn the instance on the hypervisor. [ 1920.350464] env[62820]: DEBUG nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1920.350733] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696623, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.351411] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd6084f-d5ab-4135-94e7-f607cbffe65e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.466746] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.543627] env[62820]: INFO nova.compute.claims [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1920.728542] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "4d69baaa-83da-4c5f-b88f-928693505520" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.728879] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.262s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.846288] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696623, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.868488] env[62820]: INFO nova.compute.manager [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Took 13.84 seconds to build instance. 
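The "Getting desirable topologies" / "Build topologies for 1 vcpu(s) 1:1:1" / "Possible topologies" sequence above enumerates every sockets*cores*threads factorisation of the flavor's vCPU count within the logged limits. A simplified sketch of just that enumeration step; Nova's nova/virt/hardware.py additionally applies flavor and image preferences before sorting:

```python
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

# Simplified enumeration of candidate CPU topologies for a vCPU count,
# bounded by the limits logged above (65536 sockets/cores/threads).
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

# For the 1-vCPU m1.nano / m1.micro flavors above, only 1:1:1 is possible.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```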
[ 1921.180478] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181bd453-78c2-4e38-92d4-b404ffa26efc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.189064] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e437cdbf-0294-4ab5-b529-5274c55ab665 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.220796] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f49e84d-fd9b-478b-b5d8-022b2864f401 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.225759] env[62820]: DEBUG nova.compute.manager [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Received event network-changed-56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1921.225964] env[62820]: DEBUG nova.compute.manager [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Refreshing instance network info cache due to event network-changed-56792423-7b5c-472d-8d0c-85c04d5dfe61. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1921.226286] env[62820]: DEBUG oslo_concurrency.lockutils [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] Acquiring lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.226337] env[62820]: DEBUG oslo_concurrency.lockutils [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] Acquired lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.226536] env[62820]: DEBUG nova.network.neutron [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Refreshing network info cache for port 56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1921.232017] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957e1405-1869-4a23-9393-718fee491b4e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.235916] env[62820]: INFO nova.compute.manager [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Detaching volume 86af3623-ba32-4685-98de-cf2fde2698cd [ 1921.252798] env[62820]: DEBUG nova.compute.provider_tree [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1921.286882] env[62820]: INFO nova.virt.block_device [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Attempting to driver detach volume 86af3623-ba32-4685-98de-cf2fde2698cd from mountpoint /dev/sdb [ 1921.287167] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1921.287348] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353701', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'name': 'volume-86af3623-ba32-4685-98de-cf2fde2698cd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d69baaa-83da-4c5f-b88f-928693505520', 'attached_at': '', 'detached_at': '', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'serial': '86af3623-ba32-4685-98de-cf2fde2698cd'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1921.288349] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debf2d09-57e4-4232-9f1f-c49def4341f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.311743] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba21b46-c777-49bc-8119-6fc4f9a95bcc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.319102] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ac5786-1b36-45f7-9287-62b8937b4adf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.342119] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b436996-fa66-4d84-bf40-2dcd635baca3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.349276] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696623, 'name': ReconfigVM_Task, 'duration_secs': 0.517226} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.361270] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1921.361690] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] The volume has not been displaced from its original location: [datastore1] volume-86af3623-ba32-4685-98de-cf2fde2698cd/volume-86af3623-ba32-4685-98de-cf2fde2698cd.vmdk. No consolidation needed. {{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1921.366956] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1921.367688] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d12bc6-b51c-4810-b94e-b0bc06fe12b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.370039] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d0211b5-cfe6-4fd6-99b1-f1e3ef1e9a20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.382657] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7aaf6f5b-94b6-47be-84c3-2f08c4513005 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.371s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.383237] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.393s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.383420] env[62820]: INFO nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] During sync_power_state the instance has a pending task (spawning). Skip. 
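The detach path above ends with the check logged as "The volume has not been displaced from its original location ... No consolidation needed." A minimal sketch of that decision, with both paths supplied as hypothetical inputs rather than read from the VM's device backing as the real driver does:

```python
# Sketch of the consolidation decision: if a relocation moved the disk away
# from the shadow volume's original VMDK, it must be consolidated before
# detaching; if the paths still match, there is nothing to do.
def needs_consolidation(original_vmdk_path, current_vmdk_path):
    return original_vmdk_path != current_vmdk_path

orig = ('[datastore1] volume-86af3623-ba32-4685-98de-cf2fde2698cd/'
        'volume-86af3623-ba32-4685-98de-cf2fde2698cd.vmdk')
print(needs_consolidation(orig, orig))  # False -> "No consolidation needed"
```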
[ 1921.383587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.405782] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1921.407576] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11147093-2cd6-4b14-b844-72b4190987ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.421084] env[62820]: DEBUG oslo_vmware.api [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1921.421084] env[62820]: value = "task-1696624" [ 1921.421084] env[62820]: _type = "Task" [ 1921.421084] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.428475] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1921.428475] env[62820]: value = "task-1696625" [ 1921.428475] env[62820]: _type = "Task" [ 1921.428475] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.431724] env[62820]: DEBUG oslo_vmware.api [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.440227] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696625, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.755512] env[62820]: DEBUG nova.scheduler.client.report [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1921.946036] env[62820]: DEBUG oslo_vmware.api [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696624, 'name': ReconfigVM_Task, 'duration_secs': 0.235254} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.949621] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1921.956601] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dd674ea-6719-44ff-bc92-1913f567cc3a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.973409] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696625, 'name': ReconfigVM_Task, 'duration_secs': 0.274125} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.973409] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1921.973555] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1921.979782] env[62820]: DEBUG oslo_vmware.api [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1921.979782] env[62820]: value = "task-1696626" [ 1921.979782] env[62820]: _type = "Task" [ 1921.979782] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.989052] env[62820]: DEBUG oslo_vmware.api [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696626, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.006366] env[62820]: DEBUG nova.network.neutron [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updated VIF entry in instance network info cache for port 56792423-7b5c-472d-8d0c-85c04d5dfe61. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1922.006728] env[62820]: DEBUG nova.network.neutron [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updating instance_info_cache with network_info: [{"id": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "address": "fa:16:3e:82:8a:1e", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56792423-7b", "ovs_interfaceid": "56792423-7b5c-472d-8d0c-85c04d5dfe61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.261131] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.309307] env[62820]: INFO nova.network.neutron [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating port 7f7affc8-f587-4484-9eef-211d6ea80226 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1922.481108] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3005cb85-58d6-49b5-9cf3-611637345ac9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.497138] env[62820]: DEBUG oslo_vmware.api [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696626, 'name': ReconfigVM_Task, 'duration_secs': 0.151446} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.519089] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353701', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'name': 'volume-86af3623-ba32-4685-98de-cf2fde2698cd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d69baaa-83da-4c5f-b88f-928693505520', 'attached_at': '', 'detached_at': '', 'volume_id': '86af3623-ba32-4685-98de-cf2fde2698cd', 'serial': '86af3623-ba32-4685-98de-cf2fde2698cd'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1922.519792] env[62820]: DEBUG oslo_concurrency.lockutils [req-cedbab4c-4b3e-4d28-9190-fa7b3662e7fa req-ddeee1c4-c3e2-46fa-b52d-215938322ac4 service nova] Releasing lock "refresh_cache-7a923678-5eea-4149-9a6d-0594fdb532c8" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.521164] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e92db1-1497-42a2-845e-fa6a323d2bcd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.545739] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1923.068995] env[62820]: DEBUG nova.objects.instance [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'flavor' on Instance uuid 4d69baaa-83da-4c5f-b88f-928693505520 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1923.096151] env[62820]: DEBUG nova.network.neutron [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Port 02ad8941-576b-4634-8cba-ffa38ff466c5 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1924.076899] env[62820]: DEBUG oslo_concurrency.lockutils [None req-10a88509-234a-404a-8e3a-93d05ab9ccc5 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.348s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.116986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "41666e62-526d-4553-a005-07cbc2321d0d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.117228] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.117405] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.852524] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.852845] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.853127] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "4d69baaa-83da-4c5f-b88f-928693505520-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.853355] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.853561] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.855775] env[62820]: INFO nova.compute.manager [None 
req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Terminating instance [ 1925.170799] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.171213] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.171326] env[62820]: DEBUG nova.network.neutron [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1925.360524] env[62820]: DEBUG nova.compute.manager [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1925.360895] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1925.361720] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511ac557-5330-449a-beb5-312fbdf23907 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.369169] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1925.369408] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-121db3e4-ef6d-40b2-8e43-796d25004e55 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.376561] env[62820]: DEBUG oslo_vmware.api [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1925.376561] env[62820]: value = "task-1696628" [ 1925.376561] env[62820]: _type = "Task" [ 1925.376561] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.384236] env[62820]: DEBUG oslo_vmware.api [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.886643] env[62820]: DEBUG oslo_vmware.api [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696628, 'name': PowerOffVM_Task, 'duration_secs': 0.181431} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.886931] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1925.887157] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1925.887422] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49126405-2842-407e-94af-338ad4ee3bb1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.896918] env[62820]: DEBUG nova.network.neutron [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.959368] env[62820]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1925.959661] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1925.959984] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleting the datastore file [datastore1] 4d69baaa-83da-4c5f-b88f-928693505520 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1925.960318] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c7a96ba-d4a1-4eb4-8761-6818a5ec1185 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.967076] env[62820]: DEBUG oslo_vmware.api [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1925.967076] env[62820]: value = "task-1696631" [ 1925.967076] env[62820]: _type = "Task" [ 1925.967076] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.974853] env[62820]: DEBUG oslo_vmware.api [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696631, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.298599] env[62820]: DEBUG nova.compute.manager [req-717c1ff4-6c7f-4594-9bb2-b49039c5a271 req-a9959e5b-a4c2-4572-b7b0-7aca8e594a61 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-vif-plugged-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1926.298875] env[62820]: DEBUG oslo_concurrency.lockutils [req-717c1ff4-6c7f-4594-9bb2-b49039c5a271 req-a9959e5b-a4c2-4572-b7b0-7aca8e594a61 service nova] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.299092] env[62820]: DEBUG oslo_concurrency.lockutils [req-717c1ff4-6c7f-4594-9bb2-b49039c5a271 req-a9959e5b-a4c2-4572-b7b0-7aca8e594a61 service nova] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.299307] env[62820]: DEBUG oslo_concurrency.lockutils [req-717c1ff4-6c7f-4594-9bb2-b49039c5a271 req-a9959e5b-a4c2-4572-b7b0-7aca8e594a61 service nova] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.299566] env[62820]: DEBUG nova.compute.manager [req-717c1ff4-6c7f-4594-9bb2-b49039c5a271 req-a9959e5b-a4c2-4572-b7b0-7aca8e594a61 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] No waiting events found dispatching network-vif-plugged-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1926.299804] env[62820]: WARNING nova.compute.manager [req-717c1ff4-6c7f-4594-9bb2-b49039c5a271 req-a9959e5b-a4c2-4572-b7b0-7aca8e594a61 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received unexpected event network-vif-plugged-7f7affc8-f587-4484-9eef-211d6ea80226 for instance with vm_state shelved_offloaded and task_state spawning. 
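Note on the recurring lock lines: the paired "Acquiring lock … / Lock … acquired … waited N s / Lock … released … held N s" entries above (around the per-instance "-events" locks and "compute_resources") are emitted by oslo.concurrency's synchronized wrapper around the compute-manager callables named in each message. A minimal sketch of that pattern, with a made-up lock name and function that are not taken from this log:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('example-instance-uuid-events', 'nova-')
    def _pop_event():
        # Concurrent callers using the same lock name block here; the
        # wrapper logs the "acquired :: waited" and "released :: held"
        # lines seen throughout this log.
        pass

    _pop_event()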
[ 1926.400421] env[62820]: DEBUG oslo_concurrency.lockutils [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.413363] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.413439] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.413625] env[62820]: DEBUG nova.network.neutron [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1926.476794] env[62820]: DEBUG oslo_vmware.api [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.417943} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.477070] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1926.477248] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1926.477431] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1926.477602] env[62820]: INFO nova.compute.manager [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Took 1.12 seconds to destroy the instance on the hypervisor. 
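Note on the task sequences: the "Invoking VirtualMachine.PowerOffVM_Task … Waiting for the task … progress is N% … completed successfully" runs above follow the standard oslo.vmware call-then-poll pattern. A hedged sketch of that flow, using placeholder credentials and a placeholder VM managed-object reference (none of these values come from this log):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details; real values come from nova.conf [vmware].
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder moref; the driver normally looks this up first
    # (e.g. nova.virt.vmwareapi.vm_util.get_vm_ref()).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # polls the task, producing the "progress is N%" lines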
[ 1926.477841] env[62820]: DEBUG oslo.service.loopingcall [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1926.478049] env[62820]: DEBUG nova.compute.manager [-] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1926.478149] env[62820]: DEBUG nova.network.neutron [-] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1926.928656] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217eda03-872e-4ac4-b624-ae36f841db40 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.949990] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec01ad60-4ea8-4c0c-a65f-ac1ddb809dba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.957687] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1927.153936] env[62820]: DEBUG nova.network.neutron [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.465273] env[62820]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1927.465646] env[62820]: DEBUG nova.network.neutron [-] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.467350] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4f97c2c-03bd-4992-98aa-3f40543c00f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.478217] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1927.478217] env[62820]: value = "task-1696632" [ 1927.478217] env[62820]: _type = "Task" [ 1927.478217] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.492355] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.657619] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.687888] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='da458db99c8704730bdd0665528132b0',container_format='bare',created_at=2024-12-10T16:57:53Z,direct_url=,disk_format='vmdk',id=8502cdb1-2a78-4742-9247-16bbadfc0ad6,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-794300850-shelved',owner='bfe9869537de4334a0c8ce91fd062659',properties=ImageMetaProps,protected=,size=31662592,status='active',tags=,updated_at=2024-12-10T16:58:07Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1927.688169] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1927.688327] env[62820]: DEBUG nova.virt.hardware [None 
req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1927.688511] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1927.688659] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1927.688807] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1927.689039] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1927.689205] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1927.689373] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1927.689536] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1927.689730] env[62820]: DEBUG nova.virt.hardware [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1927.690636] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2b5271-e958-4107-9536-c0d61ccc90c4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.699408] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d542812e-31cd-4634-aa75-57c07571d16a {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.713707] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:6f:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f7affc8-f587-4484-9eef-211d6ea80226', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1927.721169] env[62820]: DEBUG oslo.service.loopingcall [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1927.721505] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1927.721730] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1e38b13-cf6a-496c-a1ca-76a10329f96f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.742059] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1927.742059] env[62820]: value = "task-1696633" [ 1927.742059] env[62820]: _type = "Task" [ 1927.742059] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.750896] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696633, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.971890] env[62820]: INFO nova.compute.manager [-] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Took 1.49 seconds to deallocate network for instance. [ 1927.988025] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696632, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.252476] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696633, 'name': CreateVM_Task, 'duration_secs': 0.444379} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.252659] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1928.253357] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.253530] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.253899] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1928.254174] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7073015f-e64b-4427-9e91-2c9dec33ea18 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.258963] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1928.258963] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525e2b80-7daf-c2d1-4633-e0337593a40e" [ 1928.258963] env[62820]: _type = "Task" [ 1928.258963] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.266905] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525e2b80-7daf-c2d1-4633-e0337593a40e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.327257] env[62820]: DEBUG nova.compute.manager [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1928.327541] env[62820]: DEBUG nova.compute.manager [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing instance network info cache due to event network-changed-7f7affc8-f587-4484-9eef-211d6ea80226. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1928.327915] env[62820]: DEBUG oslo_concurrency.lockutils [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] Acquiring lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.328142] env[62820]: DEBUG oslo_concurrency.lockutils [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] Acquired lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.328321] env[62820]: DEBUG nova.network.neutron [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Refreshing network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1928.480491] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.480920] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.481054] env[62820]: DEBUG nova.objects.instance [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'resources' on Instance uuid 4d69baaa-83da-4c5f-b88f-928693505520 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.492798] env[62820]: DEBUG oslo_vmware.api [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696632, 'name': PowerOnVM_Task, 'duration_secs': 0.72752} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.493088] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1928.493251] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-49ef6146-6172-41f2-b3af-d20ebad0cc99 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance '41666e62-526d-4553-a005-07cbc2321d0d' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1928.769185] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.769438] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Processing image 8502cdb1-2a78-4742-9247-16bbadfc0ad6 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1928.769674] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.769850] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.770056] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1928.770318] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc6969af-e2a4-4c90-b569-7a7a509353f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.779364] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1928.779588] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1928.780481] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d4f9f09-b230-4cc9-ba47-8a29299af39b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.785984] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1928.785984] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]521bd925-adfe-c535-4e71-5e4812fcd94a" [ 1928.785984] env[62820]: _type = "Task" [ 1928.785984] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.800765] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]521bd925-adfe-c535-4e71-5e4812fcd94a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.084663] env[62820]: DEBUG nova.network.neutron [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updated VIF entry in instance network info cache for port 7f7affc8-f587-4484-9eef-211d6ea80226. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1929.085115] env[62820]: DEBUG nova.network.neutron [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [{"id": "7f7affc8-f587-4484-9eef-211d6ea80226", "address": "fa:16:3e:e1:6f:92", "network": {"id": "b963feda-c4d5-4cd5-a163-8fed7a0b39e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1301546256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe9869537de4334a0c8ce91fd062659", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7affc8-f5", "ovs_interfaceid": "7f7affc8-f587-4484-9eef-211d6ea80226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.113982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3ecc98-c9dc-402e-8750-83bcf4936577 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.122603] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eab7dc-cc98-4f66-84f1-2b22a695d278 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.152653] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9e8ce6-cb18-4869-9de9-513a014722cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.161296] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc75b73f-e18c-4e24-9623-2594ff68a473 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.177278] env[62820]: DEBUG nova.compute.provider_tree [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1929.300609] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Preparing fetch location {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 
1929.301032] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Fetch image to [datastore1] OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf/OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf.vmdk {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1929.301342] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Downloading stream optimized image 8502cdb1-2a78-4742-9247-16bbadfc0ad6 to [datastore1] OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf/OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf.vmdk on the data store datastore1 as vApp {{(pid=62820) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1929.301630] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Downloading image file data 8502cdb1-2a78-4742-9247-16bbadfc0ad6 to the ESX as VM named 'OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf' {{(pid=62820) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1929.385118] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1929.385118] env[62820]: value = "resgroup-9" [ 1929.385118] env[62820]: _type = "ResourcePool" [ 1929.385118] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1929.385446] env[62820]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d8bf8837-812a-4d60-b6ac-89b0043a24d7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.407643] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease: (returnval){ [ 1929.407643] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1929.407643] env[62820]: _type = "HttpNfcLease" [ 1929.407643] env[62820]: } obtained for vApp import into resource pool (val){ [ 1929.407643] env[62820]: value = "resgroup-9" [ 1929.407643] env[62820]: _type = "ResourcePool" [ 1929.407643] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1929.408042] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the lease: (returnval){ [ 1929.408042] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1929.408042] env[62820]: _type = "HttpNfcLease" [ 1929.408042] env[62820]: } to be ready. 
{{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1929.414666] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1929.414666] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1929.414666] env[62820]: _type = "HttpNfcLease" [ 1929.414666] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1929.590355] env[62820]: DEBUG oslo_concurrency.lockutils [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] Releasing lock "refresh_cache-2aeeb809-0b27-411b-b632-ef4d61b295df" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.590759] env[62820]: DEBUG nova.compute.manager [req-55d254de-070f-47db-bbea-ce4408862e15 req-b02ab74c-389e-4ec3-96f7-821afae8cf59 service nova] [instance: 4d69baaa-83da-4c5f-b88f-928693505520] Received event network-vif-deleted-023e5277-4e70-4789-bcb4-9a410cba8ec4 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1929.681030] env[62820]: DEBUG nova.scheduler.client.report [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1929.918385] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1929.918385] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1929.918385] env[62820]: _type = "HttpNfcLease" [ 1929.918385] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1930.186263] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.205934] env[62820]: INFO nova.scheduler.client.report [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted allocations for instance 4d69baaa-83da-4c5f-b88f-928693505520 [ 1930.392329] env[62820]: DEBUG nova.network.neutron [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Port 02ad8941-576b-4634-8cba-ffa38ff466c5 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1930.392897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.393099] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.393319] env[62820]: DEBUG nova.network.neutron [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.417656] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1930.417656] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1930.417656] env[62820]: _type = "HttpNfcLease" [ 1930.417656] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1930.714845] env[62820]: DEBUG oslo_concurrency.lockutils [None req-3967011b-6f4a-4a07-85e4-17040634c00f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "4d69baaa-83da-4c5f-b88f-928693505520" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.862s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.918382] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1930.918382] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1930.918382] env[62820]: _type = "HttpNfcLease" [ 1930.918382] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1931.201684] env[62820]: DEBUG nova.network.neutron [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.421857] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1931.421857] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1931.421857] env[62820]: _type = "HttpNfcLease" [ 1931.421857] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1931.422181] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1931.422181] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]5265518c-83a1-ab22-2908-d8b9173dfe6b" [ 1931.422181] env[62820]: _type = "HttpNfcLease" [ 1931.422181] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1931.422906] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0861815-4262-4f77-b789-11831c1a6390 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.430652] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52270496-aa7b-fc08-5a2d-8ea8cddd3987/disk-0.vmdk from lease info. 
{{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1931.430885] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating HTTP connection to write to file with size = 31662592 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52270496-aa7b-fc08-5a2d-8ea8cddd3987/disk-0.vmdk. {{(pid=62820) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1931.495297] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f527cf00-0bd5-46c9-817c-ad19adcffe35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.704500] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.208807] env[62820]: DEBUG nova.compute.manager [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62820) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1932.209201] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.209926] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.581746] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Completed reading data from the image iterator. {{(pid=62820) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1932.582044] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52270496-aa7b-fc08-5a2d-8ea8cddd3987/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1932.583354] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e66f89f-31c1-4431-9a93-c9ff1238922b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.592522] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52270496-aa7b-fc08-5a2d-8ea8cddd3987/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1932.592748] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52270496-aa7b-fc08-5a2d-8ea8cddd3987/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1932.593086] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-81f2e69a-3ef0-43e5-ad50-56219805569d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.713366] env[62820]: DEBUG nova.objects.instance [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'migration_context' on Instance uuid 41666e62-526d-4553-a005-07cbc2321d0d {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.804911] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52270496-aa7b-fc08-5a2d-8ea8cddd3987/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1932.805234] env[62820]: INFO nova.virt.vmwareapi.images [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Downloaded image file data 8502cdb1-2a78-4742-9247-16bbadfc0ad6 [ 1932.806203] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccd36c1-58bb-4df9-ab02-9cce8b31d779 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.822545] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb00f5b9-7222-410e-bc9d-15d57a62c89a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.849432] env[62820]: INFO nova.virt.vmwareapi.images [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] The imported VM was unregistered [ 1932.852301] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Caching image {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1932.852540] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Creating directory with path [datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1932.853212] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-914e9471-c126-4c34-bc94-e1e24b1a7cd9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.868689] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Created directory with path [datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.868912] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf/OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf.vmdk to [datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk. 
{{(pid=62820) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1932.869205] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-46759133-9718-483f-87bf-acc7d449601d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.876740] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1932.876740] env[62820]: value = "task-1696639" [ 1932.876740] env[62820]: _type = "Task" [ 1932.876740] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.885223] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.102119] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.102355] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.344197] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986b1826-9cbb-4cef-bb67-d1a304bd0f0f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.353519] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d440d6-b300-45d5-abd4-d156f20e55ab {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.391678] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd387c9e-2b2b-4a05-8e53-6d1576211e3f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.400084] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.403352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33097ca4-e09f-4b15-a41d-54ad98e36377 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.419219] env[62820]: DEBUG nova.compute.provider_tree [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.605022] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1933.900510] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.923576] env[62820]: DEBUG nova.scheduler.client.report [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1934.127436] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.402030] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.902170] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.936552] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.727s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.943559] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.816s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.945341] env[62820]: INFO nova.compute.claims [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1935.399860] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.901850] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696639, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.750299} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.901850] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf/OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf.vmdk to [datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk. 
[ 1935.901850] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Cleaning up location [datastore1] OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1935.902533] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_73b26ae0-1ef4-48c6-9b65-7b0c4b6ff5bf {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1935.902533] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b30e0eff-4d86-4be7-867b-870bbcbaaf08 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.911076] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1935.911076] env[62820]: value = "task-1696641" [ 1935.911076] env[62820]: _type = "Task" [ 1935.911076] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.920255] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696641, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.063396] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21769f6b-bffb-4d61-8129-60a955baf4e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.071591] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9e2f8e-d10b-4a3c-a04f-4e5614b7e764 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.101134] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b941a359-ae8f-4a51-866c-56eb6f50d6df {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.108447] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8175c1c6-c428-4900-964f-266a2b46c014 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.124047] env[62820]: DEBUG nova.compute.provider_tree [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.421594] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041906} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.421867] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1936.422047] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.422298] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk to [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1936.422549] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4e252b0-5eb2-436d-8369-a9aa00e30f14 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.429684] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1936.429684] env[62820]: value = "task-1696643" [ 1936.429684] env[62820]: _type = "Task" [ 1936.429684] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.437704] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696643, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.481868] env[62820]: INFO nova.compute.manager [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Swapping old allocation on dict_keys(['8a0693d4-1456-4a04-ae15-b1eaea0edd7a']) held by migration 94dd1254-4c4e-4010-a069-eefe1dc83c3e for instance [ 1936.506113] env[62820]: DEBUG nova.scheduler.client.report [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Overwriting current allocation {'allocations': {'8a0693d4-1456-4a04-ae15-b1eaea0edd7a': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 181}}, 'project_id': 'c57b0c64a8704e7aaeba4011866c7a24', 'user_id': '18246bae0222415c96ec5b252cf5bd6f', 'consumer_generation': 1} on consumer 41666e62-526d-4553-a005-07cbc2321d0d {{(pid=62820) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1936.588787] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.588992] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.589275] env[62820]: DEBUG nova.network.neutron [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1936.627504] env[62820]: DEBUG nova.scheduler.client.report [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1936.945370] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696643, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.133874] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.134473] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1937.343225] env[62820]: DEBUG nova.network.neutron [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [{"id": "02ad8941-576b-4634-8cba-ffa38ff466c5", "address": "fa:16:3e:a5:00:bc", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02ad8941-57", "ovs_interfaceid": "02ad8941-576b-4634-8cba-ffa38ff466c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.442446] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696643, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.640850] env[62820]: DEBUG nova.compute.utils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1937.642361] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Allocating IP information in the background. 
{{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1937.642616] env[62820]: DEBUG nova.network.neutron [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1937.685322] env[62820]: DEBUG nova.policy [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe4b58f7f5bd405db5c7f8b630032aa1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'accd5c1cf55248b780b00e33faf79fa0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1937.845757] env[62820]: DEBUG oslo_concurrency.lockutils [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-41666e62-526d-4553-a005-07cbc2321d0d" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.846284] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1937.846586] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-595c197e-b26a-4f24-a547-25cfd49f62d2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.857282] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1937.857282] env[62820]: value = "task-1696644" [ 1937.857282] env[62820]: _type = "Task" [ 1937.857282] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.870387] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.946142] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696643, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.100122] env[62820]: DEBUG nova.network.neutron [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Successfully created port: 857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1938.146178] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1938.370144] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.444676] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696643, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.868563] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696644, 'name': PowerOffVM_Task, 'duration_secs': 0.934631} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.868820] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1938.869487] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1938.869702] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1938.869895] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1938.870110] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1938.870259] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1938.871033] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1938.871033] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1938.871033] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 
tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1938.871033] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1938.871243] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1938.871277] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1938.876171] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5565e9a-4365-43b9-b695-2c9343050dd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.892139] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1938.892139] env[62820]: value = "task-1696646" [ 1938.892139] env[62820]: _type = "Task" [ 1938.892139] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.901040] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696646, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.942035] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696643, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.297132} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.942035] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8502cdb1-2a78-4742-9247-16bbadfc0ad6/8502cdb1-2a78-4742-9247-16bbadfc0ad6.vmdk to [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1938.942768] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559fe64a-2ebf-4f4f-8f39-0371d681953b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.966404] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1938.966725] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a55695df-5179-45e2-bf9d-b8072c55af31 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.987201] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1938.987201] env[62820]: value = "task-1696647" [ 1938.987201] env[62820]: _type = "Task" [ 1938.987201] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.995734] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696647, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.158932] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1939.188056] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1939.188339] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1939.188499] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1939.188683] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1939.188960] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1939.189138] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1939.189353] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1939.189513] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1939.189686] 
env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1939.189881] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1939.190113] env[62820]: DEBUG nova.virt.hardware [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1939.191051] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0477f97-d404-4ea9-84c8-58a6e57b169b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.200043] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f97d0b-6d74-44de-93bd-36235d6772b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.403478] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696646, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.497657] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696647, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.507274] env[62820]: DEBUG nova.compute.manager [req-c85b7b1c-b9e2-46f0-9807-d3ca402e2b3d req-84b3f8d4-b1ac-4c97-87fc-c7d56e19f084 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Received event network-vif-plugged-857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1939.507579] env[62820]: DEBUG oslo_concurrency.lockutils [req-c85b7b1c-b9e2-46f0-9807-d3ca402e2b3d req-84b3f8d4-b1ac-4c97-87fc-c7d56e19f084 service nova] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.507933] env[62820]: DEBUG oslo_concurrency.lockutils [req-c85b7b1c-b9e2-46f0-9807-d3ca402e2b3d req-84b3f8d4-b1ac-4c97-87fc-c7d56e19f084 service nova] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.508212] env[62820]: DEBUG oslo_concurrency.lockutils [req-c85b7b1c-b9e2-46f0-9807-d3ca402e2b3d req-84b3f8d4-b1ac-4c97-87fc-c7d56e19f084 service nova] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.508428] env[62820]: DEBUG nova.compute.manager [req-c85b7b1c-b9e2-46f0-9807-d3ca402e2b3d req-84b3f8d4-b1ac-4c97-87fc-c7d56e19f084 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] No waiting events found dispatching network-vif-plugged-857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1939.508648] env[62820]: WARNING nova.compute.manager [req-c85b7b1c-b9e2-46f0-9807-d3ca402e2b3d req-84b3f8d4-b1ac-4c97-87fc-c7d56e19f084 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Received unexpected event network-vif-plugged-857882cd-2832-40d3-9537-cb5042e3808a for instance with vm_state building and task_state spawning. [ 1939.597860] env[62820]: DEBUG nova.network.neutron [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Successfully updated port: 857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1939.905426] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696646, 'name': ReconfigVM_Task, 'duration_secs': 0.538225} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.906223] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1c86bf-70cd-491a-8ad9-f4129c1d1b24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.926833] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1939.927087] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1939.927189] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1939.927378] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1939.927531] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1939.927733] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1939.927943] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1939.928124] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1939.928295] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1939.928459] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1939.928687] env[62820]: DEBUG nova.virt.hardware [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1939.929500] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c027296f-2e19-49dc-86db-cde1221d14dc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.935419] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1939.935419] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527d74af-8422-aaf1-1bc3-2107b5aec97a" [ 1939.935419] env[62820]: _type = "Task" [ 1939.935419] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.944229] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527d74af-8422-aaf1-1bc3-2107b5aec97a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.998535] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696647, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.099963] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1940.100166] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.100330] env[62820]: DEBUG nova.network.neutron [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1940.446541] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527d74af-8422-aaf1-1bc3-2107b5aec97a, 'name': SearchDatastore_Task, 'duration_secs': 0.011369} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.451874] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1940.452161] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1dfc6db-63d5-4043-9d7e-63e315045ad9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.493347] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1940.493347] env[62820]: value = "task-1696648" [ 1940.493347] env[62820]: _type = "Task" [ 1940.493347] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.499729] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696647, 'name': ReconfigVM_Task, 'duration_secs': 1.046771} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.502872] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df/2aeeb809-0b27-411b-b632-ef4d61b295df.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1940.503516] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696648, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.503721] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0efe7b5-dd57-4d16-a399-f87ee84ef71b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.510377] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1940.510377] env[62820]: value = "task-1696649" [ 1940.510377] env[62820]: _type = "Task" [ 1940.510377] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.520043] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696649, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.633643] env[62820]: DEBUG nova.network.neutron [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1940.758928] env[62820]: DEBUG nova.network.neutron [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updating instance_info_cache with network_info: [{"id": "857882cd-2832-40d3-9537-cb5042e3808a", "address": "fa:16:3e:dc:96:5e", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap857882cd-28", "ovs_interfaceid": "857882cd-2832-40d3-9537-cb5042e3808a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.004290] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696648, 'name': ReconfigVM_Task, 'duration_secs': 0.180017} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.004656] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1941.005356] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd516b9-e8ca-48ac-aa59-2f815a85f3a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.027121] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1941.029854] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bfe3c23-9846-4579-a5cd-bf6a1311daed {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.048215] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696649, 'name': Rename_Task, 'duration_secs': 0.147522} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.049351] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1941.049656] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1941.049656] env[62820]: value = "task-1696650" [ 1941.049656] env[62820]: _type = "Task" [ 1941.049656] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.049871] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8986902-6946-47f0-96a2-d60cc4b59c11 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.058868] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696650, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.060070] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1941.060070] env[62820]: value = "task-1696651" [ 1941.060070] env[62820]: _type = "Task" [ 1941.060070] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.067167] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.261897] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.262209] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Instance network_info: |[{"id": "857882cd-2832-40d3-9537-cb5042e3808a", "address": "fa:16:3e:dc:96:5e", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap857882cd-28", "ovs_interfaceid": "857882cd-2832-40d3-9537-cb5042e3808a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1941.262660] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:96:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9a1e09ef-7c9c-45d9-9bf4-55b913524948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '857882cd-2832-40d3-9537-cb5042e3808a', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1941.270918] 
env[62820]: DEBUG oslo.service.loopingcall [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.271201] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1941.271510] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cd410a3-185d-4905-a144-dbc872455714 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.293916] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1941.293916] env[62820]: value = "task-1696652" [ 1941.293916] env[62820]: _type = "Task" [ 1941.293916] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.303310] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696652, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.532708] env[62820]: DEBUG nova.compute.manager [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Received event network-changed-857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1941.532957] env[62820]: DEBUG nova.compute.manager [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Refreshing instance network info cache due to event network-changed-857882cd-2832-40d3-9537-cb5042e3808a. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1941.533134] env[62820]: DEBUG oslo_concurrency.lockutils [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] Acquiring lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.533282] env[62820]: DEBUG oslo_concurrency.lockutils [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] Acquired lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.533446] env[62820]: DEBUG nova.network.neutron [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Refreshing network info cache for port 857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1941.560325] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696650, 'name': ReconfigVM_Task, 'duration_secs': 0.299242} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.560662] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d/41666e62-526d-4553-a005-07cbc2321d0d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1941.564070] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0eb8f2-d2d6-49f3-9b87-7257d36b9336 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.584967] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d0cf98-ab48-4e41-89a4-3b11729bcbe7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.587327] env[62820]: DEBUG oslo_vmware.api [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696651, 'name': PowerOnVM_Task, 'duration_secs': 0.4583} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.587571] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1941.605203] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dc6268-a32a-4dad-820e-0dc6c51b9521 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.623555] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02ada67-1e99-453d-9b9d-8732ebf2a191 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.631829] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1941.631829] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a770d65-b9a6-4f05-b3d7-1e763b264929 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.639587] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1941.639587] env[62820]: value = "task-1696653" [ 1941.639587] env[62820]: _type = "Task" [ 1941.639587] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.648256] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696653, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.701362] env[62820]: DEBUG nova.compute.manager [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1941.702223] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c04a1d2-7c95-402e-8fac-8f5300699076 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.804891] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696652, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.151822] env[62820]: DEBUG oslo_vmware.api [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696653, 'name': PowerOnVM_Task, 'duration_secs': 0.365354} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.152147] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1942.221020] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cdbcc7a-d5b3-4b8e-9f17-d1652997446f tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.895s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.249875] env[62820]: DEBUG nova.network.neutron [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updated VIF entry in instance network info cache for port 857882cd-2832-40d3-9537-cb5042e3808a. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1942.249875] env[62820]: DEBUG nova.network.neutron [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updating instance_info_cache with network_info: [{"id": "857882cd-2832-40d3-9537-cb5042e3808a", "address": "fa:16:3e:dc:96:5e", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap857882cd-28", "ovs_interfaceid": "857882cd-2832-40d3-9537-cb5042e3808a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.306743] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696652, 'name': CreateVM_Task, 'duration_secs': 0.576827} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.306913] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1942.307636] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.307811] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.308176] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1942.308459] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2a68528-df18-41cc-bb1e-141b08c93f00 {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.313888] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1942.313888] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52922733-9756-20e1-64f4-759b03087e80" [ 1942.313888] env[62820]: _type = "Task" [ 1942.313888] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.321983] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52922733-9756-20e1-64f4-759b03087e80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.332722] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "09d7f053-df0e-428a-98a4-a18d70c0158e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.332951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.751436] env[62820]: DEBUG oslo_concurrency.lockutils [req-61921fa4-1a1c-467c-b14d-92487f60c1dc req-422f9e69-a34b-46b2-8af1-5128c8ee2993 service nova] Releasing lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.824467] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52922733-9756-20e1-64f4-759b03087e80, 'name': SearchDatastore_Task, 'duration_secs': 0.014919} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.824906] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.825261] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1942.825560] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.825771] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.826034] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1942.826640] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0dcb1e72-2df8-4dfa-bb33-858c87dbc772 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.835054] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1942.839179] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1942.839407] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1942.840380] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b579dc37-a6d8-46d2-818a-5570d9e97a9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.846308] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1942.846308] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52970a00-0ce4-6276-3f6a-ecf183375252" [ 1942.846308] env[62820]: _type = "Task" [ 1942.846308] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.854540] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52970a00-0ce4-6276-3f6a-ecf183375252, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.166913] env[62820]: INFO nova.compute.manager [None req-837866c2-3e66-4e54-b879-17d5a14005cb tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance to original state: 'active' [ 1943.358891] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52970a00-0ce4-6276-3f6a-ecf183375252, 'name': SearchDatastore_Task, 'duration_secs': 0.027531} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.360046] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.360679] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.361839] env[62820]: INFO nova.compute.claims [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1943.366032] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-273bc23a-59b9-48b0-8fa0-f627eb54590c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.372353] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1943.372353] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e28ded-416f-3323-cee8-b0dc5d35b33d" [ 1943.372353] env[62820]: _type = "Task" [ 1943.372353] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.381280] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e28ded-416f-3323-cee8-b0dc5d35b33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.884699] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e28ded-416f-3323-cee8-b0dc5d35b33d, 'name': SearchDatastore_Task, 'duration_secs': 0.012512} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.885018] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.885284] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c33b3040-b93e-43f7-ab00-e29e8a307d0b/c33b3040-b93e-43f7-ab00-e29e8a307d0b.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1943.885544] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4eff98cd-fa49-43fa-be06-bdae1d68fa05 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.893081] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1943.893081] env[62820]: value = "task-1696654" [ 1943.893081] env[62820]: _type = "Task" [ 1943.893081] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.901553] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696654, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.171852] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "41666e62-526d-4553-a005-07cbc2321d0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.172245] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.172372] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "41666e62-526d-4553-a005-07cbc2321d0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.172557] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.172728] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.175070] env[62820]: INFO nova.compute.manager [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Terminating instance [ 1944.404857] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477996} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.407896] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] c33b3040-b93e-43f7-ab00-e29e8a307d0b/c33b3040-b93e-43f7-ab00-e29e8a307d0b.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1944.408134] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1944.408590] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-404bbeef-0730-49a0-ba16-6d9ced2f0786 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.415655] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1944.415655] env[62820]: value = "task-1696655" [ 1944.415655] env[62820]: _type = "Task" [ 1944.415655] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.427638] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696655, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.510677] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c32af0-c4e3-4239-a6ab-1d77abb4b401 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.518647] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2bb82b-1f70-404f-ae72-f72b67aea59a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.550161] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114fe723-cb07-4618-a508-924061e22241 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.558547] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda368dd-a9f0-4b25-9eb6-946a95006701 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.573868] env[62820]: DEBUG nova.compute.provider_tree [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1944.681633] env[62820]: DEBUG nova.compute.manager [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1944.681633] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1944.682592] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7840cd8a-afdb-44b0-b87d-8167c2e8a7b0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.691370] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1944.691618] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-681c5da0-47ee-4ab8-b758-3c1a8ed61b92 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.698832] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1944.698832] env[62820]: value = "task-1696656" [ 1944.698832] env[62820]: _type = "Task" [ 1944.698832] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.707509] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.926726] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079039} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.927021] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1944.927807] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76b2242-f611-45e6-99b4-88c1bcfaf529 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.950929] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] c33b3040-b93e-43f7-ab00-e29e8a307d0b/c33b3040-b93e-43f7-ab00-e29e8a307d0b.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1944.951263] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f0ff518-5675-47fe-aaa4-a701b829351c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.972548] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1944.972548] env[62820]: value = "task-1696657" [ 1944.972548] env[62820]: _type = "Task" [ 1944.972548] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.981370] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696657, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.077356] env[62820]: DEBUG nova.scheduler.client.report [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1945.212166] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696656, 'name': PowerOffVM_Task, 'duration_secs': 0.399626} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.212513] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1945.212688] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1945.212995] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e115dcb-9ae7-4f2f-bc21-20c35f6b1e78 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.348574] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1945.348885] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1945.349167] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleting the datastore file [datastore1] 41666e62-526d-4553-a005-07cbc2321d0d {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1945.349450] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20abe6c4-c5d0-41fe-9660-d2d77b60dfa1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.358173] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1945.358173] env[62820]: value = "task-1696659" [ 1945.358173] env[62820]: _type = "Task" [ 1945.358173] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.366141] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.482898] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.539664] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.539981] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.585087] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.585494] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1945.868677] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.984255] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.043439] env[62820]: INFO nova.compute.manager [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Detaching volume 1c9f0326-748e-4bd5-9616-8444644f2e72 [ 1946.073337] env[62820]: INFO nova.virt.block_device [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Attempting to driver detach volume 1c9f0326-748e-4bd5-9616-8444644f2e72 from mountpoint /dev/sdb [ 1946.073576] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1946.073762] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353697', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'name': 'volume-1c9f0326-748e-4bd5-9616-8444644f2e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '901626d2-1788-4017-b0c7-52537618804c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'serial': '1c9f0326-748e-4bd5-9616-8444644f2e72'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1946.074653] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c500dff-948e-421c-8a74-55225df4b4af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.096922] env[62820]: DEBUG nova.compute.utils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1946.100048] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1946.100048] env[62820]: DEBUG nova.network.neutron [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1946.101460] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fda95b7-8044-42f2-a116-90109cb5f19e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.109481] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8beb37ba-6f6a-4f07-b6ef-1e9b5ebe72b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.133560] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b9fff7-55d8-4044-b6dc-82ffc86a1984 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.149938] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] The volume has not been displaced from its original location: [datastore1] volume-1c9f0326-748e-4bd5-9616-8444644f2e72/volume-1c9f0326-748e-4bd5-9616-8444644f2e72.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1946.155357] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1946.157156] env[62820]: DEBUG nova.policy [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815f8967d40e4943a66da6866de8b018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14768f5b38ea4f6abf5583ce5e4409f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1946.158867] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d64c4a5f-3397-4813-9649-ef09840534eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.177975] env[62820]: DEBUG oslo_vmware.api [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1946.177975] env[62820]: value = "task-1696660" [ 1946.177975] env[62820]: _type = "Task" [ 1946.177975] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.186716] env[62820]: DEBUG oslo_vmware.api [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696660, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.371225] env[62820]: DEBUG oslo_vmware.api [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.941426} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.371626] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1946.371706] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1946.371858] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1946.372042] env[62820]: INFO nova.compute.manager [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1946.372289] env[62820]: DEBUG oslo.service.loopingcall [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1946.372487] env[62820]: DEBUG nova.compute.manager [-] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1946.372582] env[62820]: DEBUG nova.network.neutron [-] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1946.485545] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696657, 'name': ReconfigVM_Task, 'duration_secs': 1.276532} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.485831] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Reconfigured VM instance instance-00000079 to attach disk [datastore1] c33b3040-b93e-43f7-ab00-e29e8a307d0b/c33b3040-b93e-43f7-ab00-e29e8a307d0b.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1946.486813] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da0c593b-2038-4adc-9629-22c50e34d628 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.493975] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1946.493975] env[62820]: value = "task-1696661" [ 1946.493975] env[62820]: _type = "Task" [ 1946.493975] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.503064] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696661, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.507171] env[62820]: DEBUG nova.network.neutron [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Successfully created port: 32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1946.600512] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1946.687678] env[62820]: DEBUG oslo_vmware.api [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696660, 'name': ReconfigVM_Task, 'duration_secs': 0.24351} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.687945] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1946.693102] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d92a24f0-dba7-4e8a-9a1f-f35aebbf9d99 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.709720] env[62820]: DEBUG oslo_vmware.api [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1946.709720] env[62820]: value = "task-1696662" [ 1946.709720] env[62820]: _type = "Task" [ 1946.709720] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.718453] env[62820]: DEBUG oslo_vmware.api [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.004905] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696661, 'name': Rename_Task, 'duration_secs': 0.14738} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.005293] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1947.005549] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fc10e28-5645-46d0-b2a8-0bebf0dcb391 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.012884] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1947.012884] env[62820]: value = "task-1696663" [ 1947.012884] env[62820]: _type = "Task" [ 1947.012884] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.024855] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696663, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.105769] env[62820]: INFO nova.virt.block_device [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Booting with volume 02dc1a68-bb21-4b33-8d03-0d369092773d at /dev/sda [ 1947.154198] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ded2fb3-f3a7-4949-959a-dc71ae428f5d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.172400] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b347b12-e7bd-440c-ac7f-a11e9fdea136 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.206476] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8030240c-af82-40a3-b35e-4c3fff02cd22 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.218744] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943c9a82-afd1-4edd-a913-147d870bd9bc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.234197] env[62820]: DEBUG oslo_vmware.api [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696662, 'name': ReconfigVM_Task, 'duration_secs': 0.149582} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.234893] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353697', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'name': 'volume-1c9f0326-748e-4bd5-9616-8444644f2e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '901626d2-1788-4017-b0c7-52537618804c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9f0326-748e-4bd5-9616-8444644f2e72', 'serial': '1c9f0326-748e-4bd5-9616-8444644f2e72'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1947.257367] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da9e178-5e05-4f87-b3ea-c253382fe02b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.265278] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbbb733-aa25-483f-b141-00bbd8cb861b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.280473] env[62820]: DEBUG nova.virt.block_device [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating existing volume attachment record: 49ef261a-f8a7-4393-937f-e7c9ec66dcd2 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1947.429230] env[62820]: DEBUG nova.compute.manager [req-f62fc3ff-116f-4596-8b31-bc663661ae5d req-315ea53e-ce75-45b6-85f8-1072489cb157 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Received event network-vif-deleted-02ad8941-576b-4634-8cba-ffa38ff466c5 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1947.429576] env[62820]: INFO nova.compute.manager [req-f62fc3ff-116f-4596-8b31-bc663661ae5d req-315ea53e-ce75-45b6-85f8-1072489cb157 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Neutron deleted interface 02ad8941-576b-4634-8cba-ffa38ff466c5; detaching it from the instance and deleting it from the info cache [ 1947.429576] env[62820]: DEBUG nova.network.neutron [req-f62fc3ff-116f-4596-8b31-bc663661ae5d req-315ea53e-ce75-45b6-85f8-1072489cb157 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.524498] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696663, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.780051] env[62820]: DEBUG nova.objects.instance [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1947.906770] env[62820]: DEBUG nova.network.neutron [-] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.931924] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a9e0ef1-cd09-418f-b9a8-46331a410f47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.943919] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3505dd4-8e37-48dc-936d-5241e9e17ab2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.975507] env[62820]: DEBUG nova.compute.manager [req-f62fc3ff-116f-4596-8b31-bc663661ae5d req-315ea53e-ce75-45b6-85f8-1072489cb157 service nova] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Detach interface failed, port_id=02ad8941-576b-4634-8cba-ffa38ff466c5, reason: Instance 41666e62-526d-4553-a005-07cbc2321d0d could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1948.025118] env[62820]: DEBUG oslo_vmware.api [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696663, 'name': PowerOnVM_Task, 'duration_secs': 0.521213} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.025410] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1948.025620] env[62820]: INFO nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Took 8.87 seconds to spawn the instance on the hypervisor. [ 1948.025806] env[62820]: DEBUG nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1948.026587] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0786f6a8-6a17-48a0-846c-8e292d3966ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.409793] env[62820]: INFO nova.compute.manager [-] [instance: 41666e62-526d-4553-a005-07cbc2321d0d] Took 2.04 seconds to deallocate network for instance. 
[ 1948.548483] env[62820]: INFO nova.compute.manager [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Took 14.44 seconds to build instance. [ 1948.743613] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.787066] env[62820]: DEBUG oslo_concurrency.lockutils [None req-6b1ade94-c5f2-406a-8225-cf20e025345d tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.247s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.788224] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.045s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.788423] env[62820]: DEBUG nova.compute.manager [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1948.789545] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e21348-4140-4dd7-bc9d-26032e4d6b2d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.797188] env[62820]: DEBUG nova.compute.manager [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1948.797750] env[62820]: DEBUG nova.objects.instance [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.825083] env[62820]: DEBUG nova.network.neutron [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Successfully updated port: 32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1948.916387] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 
tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.916661] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.916852] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.957132] env[62820]: INFO nova.scheduler.client.report [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted allocations for instance 41666e62-526d-4553-a005-07cbc2321d0d [ 1949.051199] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fb447e9-5cfe-44d7-8264-2872663a1e8f tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.949s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.327188] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.327364] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.327578] env[62820]: DEBUG nova.network.neutron [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1949.366361] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Start spawning the instance on the hypervisor. 
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1949.366900] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1949.367132] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1949.367291] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1949.367481] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1949.367629] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1949.367778] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1949.368100] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1949.368184] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1949.368343] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies 
{{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1949.368508] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1949.368679] env[62820]: DEBUG nova.virt.hardware [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1949.370319] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea6fdc1-ae41-451b-bbdd-3ea44c8f6aa7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.378657] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e583e6-e855-473a-a13d-e9f7341e0b65 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.465587] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c126e719-753b-4f97-9a12-406864a6dea6 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "41666e62-526d-4553-a005-07cbc2321d0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.293s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.535650] env[62820]: DEBUG nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Received event network-vif-plugged-32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1949.535943] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Acquiring lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.538182] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.538358] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.538539] env[62820]: DEBUG nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 
09d7f053-df0e-428a-98a4-a18d70c0158e] No waiting events found dispatching network-vif-plugged-32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1949.538707] env[62820]: WARNING nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Received unexpected event network-vif-plugged-32f96b87-6a60-4c4f-877b-3ab110787004 for instance with vm_state building and task_state spawning. [ 1949.538872] env[62820]: DEBUG nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Received event network-changed-32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1949.539048] env[62820]: DEBUG nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Refreshing instance network info cache due to event network-changed-32f96b87-6a60-4c4f-877b-3ab110787004. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1949.539228] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Acquiring lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.804098] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1949.804761] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc47d681-dc3d-49a7-99bd-a633eb00beca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.812402] env[62820]: DEBUG oslo_vmware.api [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1949.812402] env[62820]: value = "task-1696664" [ 1949.812402] env[62820]: _type = "Task" [ 1949.812402] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.823183] env[62820]: DEBUG oslo_vmware.api [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.859017] env[62820]: DEBUG nova.network.neutron [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1950.007266] env[62820]: DEBUG nova.network.neutron [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.226925] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.227166] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.323283] env[62820]: DEBUG oslo_vmware.api [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696664, 'name': PowerOffVM_Task, 'duration_secs': 0.233027} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.323482] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1950.323680] env[62820]: DEBUG nova.compute.manager [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1950.324458] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05658a5-2e5f-4a5b-92b0-26eb5f74472a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.509526] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.509915] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Instance network_info: |[{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1950.510271] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Acquired lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.510452] env[62820]: DEBUG nova.network.neutron [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Refreshing network info 
cache for port 32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1950.511639] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:a8:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32f96b87-6a60-4c4f-877b-3ab110787004', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1950.519320] env[62820]: DEBUG oslo.service.loopingcall [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.522880] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1950.522880] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb2d127e-1609-4344-aeb1-3c201c6a80f7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.544897] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1950.544897] env[62820]: value = "task-1696665" [ 1950.544897] env[62820]: _type = "Task" [ 1950.544897] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.553568] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696665, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.730186] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1950.758720] env[62820]: DEBUG nova.network.neutron [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updated VIF entry in instance network info cache for port 32f96b87-6a60-4c4f-877b-3ab110787004. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1950.759182] env[62820]: DEBUG nova.network.neutron [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.835893] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fa673377-8dff-4852-834c-d50902db6b7f tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.055602] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696665, 'name': CreateVM_Task, 'duration_secs': 0.36721} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.055774] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1951.056435] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353706', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'name': 'volume-02dc1a68-bb21-4b33-8d03-0d369092773d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09d7f053-df0e-428a-98a4-a18d70c0158e', 'attached_at': '', 'detached_at': '', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'serial': '02dc1a68-bb21-4b33-8d03-0d369092773d'}, 'attachment_id': '49ef261a-f8a7-4393-937f-e7c9ec66dcd2', 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62820) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1951.056645] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Root volume attach. Driver type: vmdk {{(pid=62820) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1951.057418] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cbc39d-a062-48db-a3f4-d50807fa21a9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.065646] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae39b6ab-22e7-48aa-84c9-31f37db750a6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.072267] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c038886e-f219-4917-8d3b-bda753204319 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.078877] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-cc3eb91e-9759-4824-9f39-8ab884dbe5f8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.086595] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1951.086595] env[62820]: value = "task-1696666" [ 1951.086595] env[62820]: _type = "Task" [ 1951.086595] env[62820]: } to complete. 
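Aside: the entries above show the vmwareapi driver submitting vCenter tasks (CreateVM_Task, then RelocateVM_Task for the root volume) and then polling each task until it finishes, which is what the repeated wait_for_task / _poll_task lines record. A rough illustration of that submit-and-poll shape, written against a hypothetical get_task_info helper and not the oslo.vmware implementation:

    import time

    def wait_for_vcenter_task(get_task_info, interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            # get_task_info is a hypothetical stand-in for querying the task,
            # e.g. returning {'state': 'running', 'progress': 5}
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # each sleep/wake cycle corresponds to one "progress is N%" log line
            time.sleep(interval)
        raise TimeoutError(f'task did not complete within {timeout:.0f}s')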
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.094327] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696666, 'name': RelocateVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.254145] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.254446] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.256130] env[62820]: INFO nova.compute.claims [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1951.263498] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Releasing lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.263778] env[62820]: DEBUG nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Received event network-changed-857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1951.263981] env[62820]: DEBUG nova.compute.manager [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Refreshing instance network info cache due to event network-changed-857882cd-2832-40d3-9537-cb5042e3808a. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1951.264238] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Acquiring lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.264433] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Acquired lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.264621] env[62820]: DEBUG nova.network.neutron [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Refreshing network info cache for port 857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1951.491325] env[62820]: DEBUG nova.objects.instance [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1951.597126] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696666, 'name': RelocateVM_Task, 'duration_secs': 0.364239} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.597400] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1951.597601] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353706', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'name': 'volume-02dc1a68-bb21-4b33-8d03-0d369092773d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09d7f053-df0e-428a-98a4-a18d70c0158e', 'attached_at': '', 'detached_at': '', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'serial': '02dc1a68-bb21-4b33-8d03-0d369092773d'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1951.598374] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f354051-8b67-4864-a741-653a8c7141b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.613621] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6ff3e4-7165-4b58-906a-2453a74f3c9e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.634200] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-02dc1a68-bb21-4b33-8d03-0d369092773d/volume-02dc1a68-bb21-4b33-8d03-0d369092773d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1951.634705] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-708593ff-7d12-4a3c-b5a3-a4ee2bea18f9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.655121] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1951.655121] env[62820]: value = "task-1696667" [ 1951.655121] env[62820]: _type = "Task" [ 1951.655121] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.662683] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.989151] env[62820]: DEBUG nova.network.neutron [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updated VIF entry in instance network info cache for port 857882cd-2832-40d3-9537-cb5042e3808a. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1951.989582] env[62820]: DEBUG nova.network.neutron [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updating instance_info_cache with network_info: [{"id": "857882cd-2832-40d3-9537-cb5042e3808a", "address": "fa:16:3e:dc:96:5e", "network": {"id": "05c4b605-53fd-4b70-bd80-34c403481480", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-991744611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "accd5c1cf55248b780b00e33faf79fa0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9a1e09ef-7c9c-45d9-9bf4-55b913524948", "external-id": "nsx-vlan-transportzone-466", "segmentation_id": 466, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap857882cd-28", "ovs_interfaceid": "857882cd-2832-40d3-9537-cb5042e3808a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.995417] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.995618] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.995859] env[62820]: DEBUG nova.network.neutron [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1951.996111] env[62820]: DEBUG nova.objects.instance [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'info_cache' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1952.165238] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696667, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.357182] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979512b2-dee6-4da1-873b-217ad565af52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.364989] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7b6760-392e-4663-a927-25cba4013c82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.395391] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0df186a-f737-4088-a549-d291e2e12dce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.402996] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d3ba2b-171d-42cc-917a-bc34a3da4968 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.416920] env[62820]: DEBUG nova.compute.provider_tree [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1952.492020] env[62820]: DEBUG oslo_concurrency.lockutils [req-3146acff-e4ff-4172-a1e2-eeb3aa95774c req-09114bfa-2061-4109-bf4a-a1ed73ab84e3 service nova] Releasing lock "refresh_cache-c33b3040-b93e-43f7-ab00-e29e8a307d0b" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.497937] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.499216] env[62820]: DEBUG nova.objects.base [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Object Instance<901626d2-1788-4017-b0c7-52537618804c> lazy-loaded attributes: flavor,info_cache {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1952.500406] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.667906] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696667, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.920106] env[62820]: DEBUG nova.scheduler.client.report [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1953.008729] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.009032] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 1953.166769] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696667, 'name': ReconfigVM_Task, 'duration_secs': 1.082909} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.168950] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-02dc1a68-bb21-4b33-8d03-0d369092773d/volume-02dc1a68-bb21-4b33-8d03-0d369092773d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1953.173688] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcfaabd8-1bfa-4800-8dda-e2725c7857e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.189358] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1953.189358] env[62820]: value = "task-1696668" [ 1953.189358] env[62820]: _type = "Task" [ 1953.189358] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.199331] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696668, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.218775] env[62820]: DEBUG nova.network.neutron [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [{"id": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "address": "fa:16:3e:96:64:a6", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3537ab9-0a", "ovs_interfaceid": "d3537ab9-0a82-437a-83c1-ffb18a60490a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.425411] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.171s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.425902] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Start building networks asynchronously for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1953.699220] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696668, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.721963] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "refresh_cache-901626d2-1788-4017-b0c7-52537618804c" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.931111] env[62820]: DEBUG nova.compute.utils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1953.933059] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1953.933059] env[62820]: DEBUG nova.network.neutron [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1953.970290] env[62820]: DEBUG nova.policy [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18246bae0222415c96ec5b252cf5bd6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57b0c64a8704e7aaeba4011866c7a24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 1954.201030] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696668, 'name': ReconfigVM_Task, 'duration_secs': 0.606966} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.201030] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353706', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'name': 'volume-02dc1a68-bb21-4b33-8d03-0d369092773d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '09d7f053-df0e-428a-98a4-a18d70c0158e', 'attached_at': '', 'detached_at': '', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'serial': '02dc1a68-bb21-4b33-8d03-0d369092773d'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1954.201555] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c096cdf-4d11-4d31-bac6-6fca7d35cd88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.208031] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1954.208031] env[62820]: value = "task-1696669" [ 1954.208031] env[62820]: _type = "Task" [ 1954.208031] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.216621] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696669, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.248950] env[62820]: DEBUG nova.network.neutron [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Successfully created port: 10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1954.436289] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Start building block device mappings for instance. {{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1954.718802] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696669, 'name': Rename_Task, 'duration_secs': 0.12977} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.719082] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1954.719329] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c24847f-9ae8-4792-921c-52f885e34682 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.726465] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1954.726746] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1954.726746] env[62820]: value = "task-1696670" [ 1954.726746] env[62820]: _type = "Task" [ 1954.726746] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.726940] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-388199f1-32d3-4b74-9e78-8ae677561c88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.738476] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.738476] env[62820]: DEBUG oslo_vmware.api [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1954.738476] env[62820]: value = "task-1696671" [ 1954.738476] env[62820]: _type = "Task" [ 1954.738476] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.746057] env[62820]: DEBUG oslo_vmware.api [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696671, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.173930] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.173930] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.174158] env[62820]: INFO nova.compute.manager [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Shelving [ 1955.239222] env[62820]: DEBUG oslo_vmware.api [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696670, 'name': PowerOnVM_Task, 'duration_secs': 0.501529} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.242522] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1955.242739] env[62820]: INFO nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Took 5.88 seconds to spawn the instance on the hypervisor. [ 1955.242926] env[62820]: DEBUG nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1955.243707] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee63493-b1c0-474b-ab0f-5c3be0de80e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.251852] env[62820]: DEBUG oslo_vmware.api [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696671, 'name': PowerOnVM_Task, 'duration_secs': 0.477297} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.253294] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1955.253503] env[62820]: DEBUG nova.compute.manager [None req-dfb9d581-bb65-46b8-9c8c-242b37474cbd tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1955.257552] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac54f40-35fc-41ab-b322-9fbe94c1b211 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.447691] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Start spawning the instance on the hypervisor. {{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1955.475349] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1955.475599] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1955.475757] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1955.475941] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1955.476105] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 
tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1955.476256] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1955.476465] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1955.476624] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1955.476790] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1955.476952] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1955.477138] env[62820]: DEBUG nova.virt.hardware [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1955.478023] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2802c7-d187-4353-99c3-a9779f024a30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.486332] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6570a7a0-3bd8-4d4d-b31b-c6b1ba76ed24 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.682694] env[62820]: DEBUG nova.compute.manager [req-7ea83a28-346b-49f8-8423-d2d619abed5a req-decacc9f-fa86-4aed-8e94-0561525e1480 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Received event network-vif-plugged-10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1955.682911] env[62820]: DEBUG oslo_concurrency.lockutils [req-7ea83a28-346b-49f8-8423-d2d619abed5a req-decacc9f-fa86-4aed-8e94-0561525e1480 service nova] Acquiring lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.683130] env[62820]: DEBUG oslo_concurrency.lockutils [req-7ea83a28-346b-49f8-8423-d2d619abed5a req-decacc9f-fa86-4aed-8e94-0561525e1480 service nova] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.683361] env[62820]: DEBUG oslo_concurrency.lockutils [req-7ea83a28-346b-49f8-8423-d2d619abed5a req-decacc9f-fa86-4aed-8e94-0561525e1480 service nova] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.683532] env[62820]: DEBUG nova.compute.manager [req-7ea83a28-346b-49f8-8423-d2d619abed5a req-decacc9f-fa86-4aed-8e94-0561525e1480 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] No waiting events found dispatching network-vif-plugged-10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1955.683657] env[62820]: WARNING nova.compute.manager [req-7ea83a28-346b-49f8-8423-d2d619abed5a req-decacc9f-fa86-4aed-8e94-0561525e1480 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Received unexpected event network-vif-plugged-10550a85-a1ac-4990-b2e8-34972567d45b for instance with vm_state building and task_state spawning. [ 1955.768902] env[62820]: DEBUG nova.network.neutron [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Successfully updated port: 10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1955.774214] env[62820]: INFO nova.compute.manager [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Took 12.43 seconds to build instance. [ 1956.024464] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Didn't find any instances for network info cache update. 
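Aside: pulling together the duration_secs values logged above for instance 09d7f053-df0e-428a-98a4-a18d70c0158e, the vCenter tasks account for roughly 3.05 s of the 5.88 s the driver reports for spawning on the hypervisor and the 12.43 s reported for the overall build. A quick check, using only figures that appear in this log:

    # Durations copied from the 'duration_secs' fields logged above.
    durations = {
        'CreateVM_Task (task-1696665)':   0.36721,
        'RelocateVM_Task (task-1696666)': 0.364239,
        'ReconfigVM_Task (task-1696667)': 1.082909,
        'ReconfigVM_Task (task-1696668)': 0.606966,
        'Rename_Task (task-1696669)':     0.12977,
        'PowerOnVM_Task (task-1696670)':  0.501529,
    }
    total = sum(durations.values())
    print(f"vCenter task time: {total:.2f}s; spawn reported 5.88s; build reported 12.43s")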
{{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10372}} [ 1956.024685] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.024850] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.025012] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.025170] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.025313] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.025455] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.026053] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 1956.026053] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.188234] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.188577] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef1ae6c3-4c0d-4d59-a661-db2e7d509cf5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.198560] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1956.198560] env[62820]: value = "task-1696672" [ 1956.198560] env[62820]: _type = "Task" [ 1956.198560] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.212159] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.274325] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.274692] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.274822] env[62820]: DEBUG nova.network.neutron [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1956.276737] env[62820]: DEBUG oslo_concurrency.lockutils [None req-5da2f064-72a1-4ce3-b230-45abbfad6d92 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.944s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.529232] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.529454] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.529619] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.529775] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1956.530846] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8bb6c59d-782d-4813-bb37-4b9ff677969b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.540055] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca106f4c-4300-41c7-a3dd-368ef30313a4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.557213] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeaa004a-db5d-4d42-a6da-5c5aa7b7eeea {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.565365] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be188e42-1378-4e11-bae0-3e220f3b9528 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.597473] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179610MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1956.597669] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.597840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.709198] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696672, 'name': PowerOffVM_Task, 'duration_secs': 0.274739} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.709764] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1956.710655] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84a4b02-8072-4962-87b8-4207053ff90c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.730121] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73ef554-1e20-4366-bbcd-9672cddc802f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.822842] env[62820]: DEBUG nova.network.neutron [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Instance cache missing network info. {{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1957.059065] env[62820]: DEBUG nova.network.neutron [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [{"id": "10550a85-a1ac-4990-b2e8-34972567d45b", "address": "fa:16:3e:2e:d9:60", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10550a85-a1", "ovs_interfaceid": "10550a85-a1ac-4990-b2e8-34972567d45b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.241015] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Creating Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1957.241379] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c29e9fd8-49f0-4a3f-b9ec-5d6c85a751da {{(pid=62820) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.250506] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1957.250506] env[62820]: value = "task-1696673" [ 1957.250506] env[62820]: _type = "Task" [ 1957.250506] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.261247] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696673, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.562610] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1957.562974] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Instance network_info: |[{"id": "10550a85-a1ac-4990-b2e8-34972567d45b", "address": "fa:16:3e:2e:d9:60", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10550a85-a1", "ovs_interfaceid": "10550a85-a1ac-4990-b2e8-34972567d45b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1957.563138] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:d9:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10550a85-a1ac-4990-b2e8-34972567d45b', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1957.571019] env[62820]: DEBUG oslo.service.loopingcall [None 
req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1957.571326] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1957.571598] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db784237-b201-4173-bd83-875d2a15b62b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.592487] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1957.592487] env[62820]: value = "task-1696674" [ 1957.592487] env[62820]: _type = "Task" [ 1957.592487] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.601058] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696674, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.632711] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.632883] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 901626d2-1788-4017-b0c7-52537618804c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.633024] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 72cdf2b2-fb69-4820-a663-56bfe92572d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.633163] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7a923678-5eea-4149-9a6d-0594fdb532c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.633308] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 2aeeb809-0b27-411b-b632-ef4d61b295df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.633441] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance c33b3040-b93e-43f7-ab00-e29e8a307d0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.633559] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance ab74220e-d9c4-4c96-a38d-9935dd3e13c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1957.714912] env[62820]: DEBUG nova.compute.manager [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Received event network-changed-10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1957.715148] env[62820]: DEBUG nova.compute.manager [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Refreshing instance network info cache due to event network-changed-10550a85-a1ac-4990-b2e8-34972567d45b. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1957.715363] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Acquiring lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.715508] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Acquired lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.715666] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Refreshing network info cache for port 10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1957.760945] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696673, 'name': CreateSnapshot_Task, 'duration_secs': 0.45263} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.761218] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Created Snapshot of the VM instance {{(pid=62820) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1957.762024] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d98682-0f12-45af-ac88-f40379d9f3ba {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.103223] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696674, 'name': CreateVM_Task} progress is 99%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.138050] env[62820]: INFO nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 557b40e4-0e79-4be3-b950-3c4303004af0 has allocations against this compute host but is not found in the database. [ 1958.138050] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1958.138050] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1958.271662] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae73edb3-00f6-424e-8f73-7ad9a3464e6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.281449] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Creating linked-clone VM from snapshot {{(pid=62820) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1958.281843] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-05744998-ceaa-43b6-a486-eef9b4bb3e28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.290711] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40082469-c2ca-4cfd-aaec-57ecc397fec9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.297082] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1958.297082] env[62820]: value = "task-1696675" [ 1958.297082] env[62820]: _type = "Task" [ 1958.297082] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.328498] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c184f4a4-5f8a-4fde-96d1-a745f3f839f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.334607] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696675, 'name': CloneVM_Task} progress is 12%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.340086] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e18d306-9e09-4cbe-bb7b-6ed24d65fa28 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.356036] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1958.455869] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updated VIF entry in instance network info cache for port 10550a85-a1ac-4990-b2e8-34972567d45b. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1958.456303] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [{"id": "10550a85-a1ac-4990-b2e8-34972567d45b", "address": "fa:16:3e:2e:d9:60", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10550a85-a1", "ovs_interfaceid": "10550a85-a1ac-4990-b2e8-34972567d45b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.530166] env[62820]: DEBUG nova.compute.manager [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Stashing vm_state: active {{(pid=62820) _prep_resize 
/opt/stack/nova/nova/compute/manager.py:5968}} [ 1958.603799] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696674, 'name': CreateVM_Task, 'duration_secs': 0.936614} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.603799] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1958.604231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.604231] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.604506] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1958.604744] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83ebc1ee-a841-4fa7-aa90-d7e27f60d78b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.609421] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1958.609421] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]528158d3-d7c3-5da8-50fc-d57d7d4ee056" [ 1958.609421] env[62820]: _type = "Task" [ 1958.609421] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.616895] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]528158d3-d7c3-5da8-50fc-d57d7d4ee056, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.807993] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696675, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.860704] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1958.958776] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Releasing lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.959067] env[62820]: DEBUG nova.compute.manager [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Received event network-changed-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1958.959230] env[62820]: DEBUG nova.compute.manager [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Refreshing instance network info cache due to event network-changed-4b6e11a8-0891-4efe-bc15-3803f5edc4c0. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1958.959436] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Acquiring lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.959585] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Acquired lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.959757] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Refreshing network info cache for port 4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1959.048085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.120634] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': 
session[5263da33-e147-45e9-71e6-fd449b37f057]528158d3-d7c3-5da8-50fc-d57d7d4ee056, 'name': SearchDatastore_Task, 'duration_secs': 0.032489} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.120914] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.121166] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1959.121417] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.121566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.121760] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1959.122053] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20222fa7-bb41-4f17-ad18-48fcc5954b47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.130581] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1959.130771] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1959.131726] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c38d0f0-b7fe-4845-901a-1672a6ea20af {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.136769] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1959.136769] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]523f66fd-80f0-f4c3-5492-716e4ef8ee94" [ 1959.136769] env[62820]: _type = "Task" [ 1959.136769] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.144742] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523f66fd-80f0-f4c3-5492-716e4ef8ee94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.308682] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696675, 'name': CloneVM_Task} progress is 94%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.365712] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1959.365968] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.768s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.366321] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.318s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.647930] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]523f66fd-80f0-f4c3-5492-716e4ef8ee94, 'name': SearchDatastore_Task, 'duration_secs': 0.011109} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.648979] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faf31fa1-74e7-4dda-9eba-a503afe71d97 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.654716] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1959.654716] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52804630-5e52-dac1-8f66-90042123e7f0" [ 1959.654716] env[62820]: _type = "Task" [ 1959.654716] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.664582] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52804630-5e52-dac1-8f66-90042123e7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.808277] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696675, 'name': CloneVM_Task, 'duration_secs': 1.423833} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.808515] env[62820]: INFO nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Created linked-clone VM from snapshot [ 1959.809260] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5f7cb3-33bd-4e4b-bb4f-edb5aec5d667 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.816673] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Uploading image fd800fb1-435f-4a8d-a070-d66c5f9af240 {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1959.843390] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1959.843390] env[62820]: value = "vm-353711" [ 1959.843390] env[62820]: _type = "VirtualMachine" [ 1959.843390] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1959.843658] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bc6daf4d-8eff-4ea4-b7ad-9171285047aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.851168] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease: (returnval){ [ 1959.851168] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ff4a38-8305-1e88-b584-43fac28905f9" [ 1959.851168] env[62820]: _type = "HttpNfcLease" [ 1959.851168] env[62820]: } obtained for exporting VM: (result){ [ 1959.851168] env[62820]: value = "vm-353711" [ 1959.851168] env[62820]: _type = "VirtualMachine" [ 1959.851168] env[62820]: }. {{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1959.851566] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the lease: (returnval){ [ 1959.851566] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ff4a38-8305-1e88-b584-43fac28905f9" [ 1959.851566] env[62820]: _type = "HttpNfcLease" [ 1959.851566] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1959.857873] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1959.857873] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ff4a38-8305-1e88-b584-43fac28905f9" [ 1959.857873] env[62820]: _type = "HttpNfcLease" [ 1959.857873] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1959.871054] env[62820]: INFO nova.compute.claims [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1959.921015] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updated VIF entry in instance network info cache for port 4b6e11a8-0891-4efe-bc15-3803f5edc4c0. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1959.921385] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.165052] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52804630-5e52-dac1-8f66-90042123e7f0, 'name': SearchDatastore_Task, 'duration_secs': 0.013049} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.165331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.165597] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ab74220e-d9c4-4c96-a38d-9935dd3e13c0/ab74220e-d9c4-4c96-a38d-9935dd3e13c0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1960.165843] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a40e487-672d-468a-bb2a-d9bfed811c0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.172723] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1960.172723] env[62820]: value = "task-1696677" [ 1960.172723] env[62820]: _type = "Task" [ 1960.172723] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.179878] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696677, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.360377] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1960.360377] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ff4a38-8305-1e88-b584-43fac28905f9" [ 1960.360377] env[62820]: _type = "HttpNfcLease" [ 1960.360377] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1960.360742] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1960.360742] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52ff4a38-8305-1e88-b584-43fac28905f9" [ 1960.360742] env[62820]: _type = "HttpNfcLease" [ 1960.360742] env[62820]: }. 
{{(pid=62820) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1960.361514] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0921277-3de6-4ca5-8a4d-70ffd3060a95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.369264] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5206b12b-14eb-9703-6395-972dbf8840a9/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1960.369489] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5206b12b-14eb-9703-6395-972dbf8840a9/disk-0.vmdk for reading. {{(pid=62820) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1960.430773] env[62820]: INFO nova.compute.resource_tracker [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating resource usage from migration 557b40e4-0e79-4be3-b950-3c4303004af0 [ 1960.433659] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Releasing lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.433907] env[62820]: DEBUG nova.compute.manager [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Received event network-changed-32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1960.434090] env[62820]: DEBUG nova.compute.manager [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Refreshing instance network info cache due to event network-changed-32f96b87-6a60-4c4f-877b-3ab110787004. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1960.434305] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Acquiring lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.434447] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Acquired lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.434608] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Refreshing network info cache for port 32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1960.508035] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-62e1d4c2-c402-4363-a79e-4bd802906bb7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.580181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8525df08-87cb-482b-9b1e-7dac0c23216f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.589068] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134ba973-4f8d-4169-9f99-94a7725fdba8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.622522] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912006dd-b674-4987-9555-4b86a8bb1587 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.631664] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ce4ac0-a490-45c1-9e10-337f60d61ceb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.645448] env[62820]: DEBUG nova.compute.provider_tree [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1960.682677] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47436} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.683015] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] ab74220e-d9c4-4c96-a38d-9935dd3e13c0/ab74220e-d9c4-4c96-a38d-9935dd3e13c0.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1960.683168] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1960.683414] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b08f0b37-eced-41db-890b-e35c70a8c2b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.690016] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1960.690016] env[62820]: value = "task-1696678" [ 1960.690016] env[62820]: _type = "Task" [ 1960.690016] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.701486] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696678, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.148543] env[62820]: DEBUG nova.scheduler.client.report [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1961.161431] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updated VIF entry in instance network info cache for port 32f96b87-6a60-4c4f-877b-3ab110787004. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1961.161942] env[62820]: DEBUG nova.network.neutron [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.199501] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696678, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063412} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.199865] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1961.200735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d084508-1869-4137-8597-5ca05735a21d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.223486] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] ab74220e-d9c4-4c96-a38d-9935dd3e13c0/ab74220e-d9c4-4c96-a38d-9935dd3e13c0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1961.223589] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84521082-18f3-424c-a701-7b258f6b095f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.244808] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1961.244808] env[62820]: value = "task-1696679" [ 1961.244808] env[62820]: _type = "Task" [ 1961.244808] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.253893] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696679, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.653701] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.287s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.653701] env[62820]: INFO nova.compute.manager [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Migrating [ 1961.668477] env[62820]: DEBUG oslo_concurrency.lockutils [req-0e75ce97-fd07-4290-b3e4-118542ae1fe1 req-54014ddd-712f-4162-80d6-512e4439424c service nova] Releasing lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.756410] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696679, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.171019] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1962.171395] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.171588] env[62820]: DEBUG nova.network.neutron [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1962.256200] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696679, 'name': ReconfigVM_Task, 'duration_secs': 0.920665} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.257160] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Reconfigured VM instance instance-0000007b to attach disk [datastore1] ab74220e-d9c4-4c96-a38d-9935dd3e13c0/ab74220e-d9c4-4c96-a38d-9935dd3e13c0.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1962.257356] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d136e106-bf67-424a-8752-06029ab29552 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.264281] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1962.264281] env[62820]: value = "task-1696680" [ 1962.264281] env[62820]: _type = "Task" [ 1962.264281] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.272700] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696680, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.774797] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696680, 'name': Rename_Task, 'duration_secs': 0.201224} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.775212] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1962.775370] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fbcae7b-274a-445e-99fe-0864746e17c3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.783020] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 1962.783020] env[62820]: value = "task-1696681" [ 1962.783020] env[62820]: _type = "Task" [ 1962.783020] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.791059] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696681, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.901638] env[62820]: DEBUG nova.network.neutron [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.294321] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696681, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.405081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.794129] env[62820]: DEBUG oslo_vmware.api [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696681, 'name': PowerOnVM_Task, 'duration_secs': 0.688679} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.794479] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1963.794585] env[62820]: INFO nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 1963.794768] env[62820]: DEBUG nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1963.795589] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856f4531-d1e3-43fb-be36-66e730bd4e30 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.312554] env[62820]: INFO nova.compute.manager [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Took 13.08 seconds to build instance. [ 1964.654455] env[62820]: DEBUG nova.compute.manager [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Received event network-changed-10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1964.654455] env[62820]: DEBUG nova.compute.manager [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Refreshing instance network info cache due to event network-changed-10550a85-a1ac-4990-b2e8-34972567d45b. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1964.654907] env[62820]: DEBUG oslo_concurrency.lockutils [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] Acquiring lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.655047] env[62820]: DEBUG oslo_concurrency.lockutils [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] Acquired lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.655262] env[62820]: DEBUG nova.network.neutron [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Refreshing network info cache for port 10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1964.814679] env[62820]: DEBUG oslo_concurrency.lockutils [None req-dcba8a48-54b4-43d5-bf34-6e3c1bc0ef61 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.587s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.920378] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40624707-fc02-44eb-b174-f7c777617b00 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.939596] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 
tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance '09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 0 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1965.375219] env[62820]: DEBUG nova.network.neutron [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updated VIF entry in instance network info cache for port 10550a85-a1ac-4990-b2e8-34972567d45b. {{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1965.375626] env[62820]: DEBUG nova.network.neutron [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [{"id": "10550a85-a1ac-4990-b2e8-34972567d45b", "address": "fa:16:3e:2e:d9:60", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10550a85-a1", "ovs_interfaceid": "10550a85-a1ac-4990-b2e8-34972567d45b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.445440] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1965.445759] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-504c4d7c-efec-4f4e-8b9e-06f1b532ac0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.455690] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1965.455690] env[62820]: value = "task-1696682" [ 1965.455690] env[62820]: _type = "Task" [ 1965.455690] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.464501] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696682, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.878590] env[62820]: DEBUG oslo_concurrency.lockutils [req-bdc5460b-2961-467f-af7f-0bc9c213828d req-6a2fb0fb-8427-44bd-a126-a205dc4ffe31 service nova] Releasing lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.966583] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696682, 'name': PowerOffVM_Task, 'duration_secs': 0.296954} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.966893] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1965.967110] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance '09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 17 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1966.474315] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1966.474617] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1966.474810] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1966.475045] env[62820]: DEBUG nova.virt.hardware [None 
req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1966.475209] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1966.475402] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1966.475637] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1966.475818] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1966.476052] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1966.476249] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1966.476429] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1966.481895] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17dc856f-0754-4369-b074-01699d55a4c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.499183] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1966.499183] env[62820]: value = "task-1696683" [ 1966.499183] env[62820]: _type = "Task" [ 1966.499183] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.508563] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696683, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.009364] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696683, 'name': ReconfigVM_Task, 'duration_secs': 0.197653} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.009723] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance '09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 33 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1967.516217] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1967.516486] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1967.516651] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1967.516842] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1967.517074] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1967.517290] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1967.517522] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1967.517760] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1967.517892] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1967.518072] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1967.518268] env[62820]: DEBUG nova.virt.hardware [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1967.523858] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1967.524225] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee487295-aea2-400b-8aa2-13bf046eb14e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.544301] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1967.544301] env[62820]: value = "task-1696684" [ 1967.544301] env[62820]: _type = "Task" [ 1967.544301] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.553102] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696684, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.884697] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5206b12b-14eb-9703-6395-972dbf8840a9/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1967.885697] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988fe765-7afa-4f9e-9ea8-1ccda4d1110f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.892216] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5206b12b-14eb-9703-6395-972dbf8840a9/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1967.892421] env[62820]: ERROR oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5206b12b-14eb-9703-6395-972dbf8840a9/disk-0.vmdk due to incomplete transfer. [ 1967.892655] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-57387727-a379-44b7-9117-918e057923db {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.899566] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5206b12b-14eb-9703-6395-972dbf8840a9/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1967.899750] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Uploaded image fd800fb1-435f-4a8d-a070-d66c5f9af240 to the Glance image server {{(pid=62820) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1967.902103] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Destroying the VM {{(pid=62820) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1967.902333] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c6a599ab-998b-42b0-9c0c-68a1a0fe6904 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.908396] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1967.908396] env[62820]: value = "task-1696685" [ 1967.908396] env[62820]: _type = "Task" [ 1967.908396] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.915852] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696685, 'name': Destroy_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.054448] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696684, 'name': ReconfigVM_Task, 'duration_secs': 0.243804} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.054796] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1968.055626] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc851e2d-fc06-4e6a-84ab-9df0734af051 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.080122] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-02dc1a68-bb21-4b33-8d03-0d369092773d/volume-02dc1a68-bb21-4b33-8d03-0d369092773d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1968.080795] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68f539b6-77c1-40aa-976b-1313db159367 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.098935] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1968.098935] env[62820]: value = "task-1696686" [ 1968.098935] env[62820]: _type = "Task" [ 1968.098935] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.106875] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696686, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.418350] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696685, 'name': Destroy_Task, 'duration_secs': 0.413333} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.418590] env[62820]: INFO nova.virt.vmwareapi.vm_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Destroyed the VM [ 1968.418793] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deleting Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1968.419048] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7268129d-f9f8-4fc3-8cff-d193c09cc0ce {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.425695] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1968.425695] env[62820]: value = "task-1696687" [ 1968.425695] env[62820]: _type = "Task" [ 1968.425695] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.433131] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696687, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.609156] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696686, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.935806] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696687, 'name': RemoveSnapshot_Task, 'duration_secs': 0.327998} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.936122] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deleted Snapshot of the VM instance {{(pid=62820) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1968.936409] env[62820]: DEBUG nova.compute.manager [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1968.937181] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3876951b-2b9e-4a79-92f2-4fca24699523 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.108428] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696686, 'name': ReconfigVM_Task, 'duration_secs': 0.651375} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.108774] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-02dc1a68-bb21-4b33-8d03-0d369092773d/volume-02dc1a68-bb21-4b33-8d03-0d369092773d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1969.108952] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance '09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 50 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1969.449184] env[62820]: INFO nova.compute.manager [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Shelve offloading [ 1969.615849] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa82e450-84a6-454f-b812-771ce134007d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.635658] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752149bb-42fc-4918-968d-7a2d1ae4d02f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.657211] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance 
'09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 67 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1969.952623] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1969.953029] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1375fd24-7a1c-446b-8e45-c9b0bdbf2791 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.960523] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1969.960523] env[62820]: value = "task-1696688" [ 1969.960523] env[62820]: _type = "Task" [ 1969.960523] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.968067] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.471436] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] VM already powered off {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1970.471874] env[62820]: DEBUG nova.compute.manager [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1970.472432] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e77304-d15f-4327-90bc-0344cec97e0c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.484767] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.484986] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.485114] env[62820]: DEBUG nova.network.neutron [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] 
[instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1971.247409] env[62820]: DEBUG nova.network.neutron [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.296261] env[62820]: DEBUG nova.network.neutron [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Port 32f96b87-6a60-4c4f-877b-3ab110787004 binding to destination host cpu-1 is already ACTIVE {{(pid=62820) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1971.751067] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.020307] env[62820]: DEBUG nova.compute.manager [req-43430b6e-98dc-487c-ad2d-6ad2b7afdc03 req-3401551e-0a07-4eff-bbbc-99a743850ecd service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-vif-unplugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1972.020540] env[62820]: DEBUG oslo_concurrency.lockutils [req-43430b6e-98dc-487c-ad2d-6ad2b7afdc03 req-3401551e-0a07-4eff-bbbc-99a743850ecd service nova] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.020739] env[62820]: DEBUG oslo_concurrency.lockutils [req-43430b6e-98dc-487c-ad2d-6ad2b7afdc03 req-3401551e-0a07-4eff-bbbc-99a743850ecd service nova] Lock 
"72cdf2b2-fb69-4820-a663-56bfe92572d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.020902] env[62820]: DEBUG oslo_concurrency.lockutils [req-43430b6e-98dc-487c-ad2d-6ad2b7afdc03 req-3401551e-0a07-4eff-bbbc-99a743850ecd service nova] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.021160] env[62820]: DEBUG nova.compute.manager [req-43430b6e-98dc-487c-ad2d-6ad2b7afdc03 req-3401551e-0a07-4eff-bbbc-99a743850ecd service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] No waiting events found dispatching network-vif-unplugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1972.021360] env[62820]: WARNING nova.compute.manager [req-43430b6e-98dc-487c-ad2d-6ad2b7afdc03 req-3401551e-0a07-4eff-bbbc-99a743850ecd service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received unexpected event network-vif-unplugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 for instance with vm_state shelved and task_state shelving_offloading. [ 1972.125478] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1972.126421] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6242d35-5d43-42d4-bc0a-dcbcff97a636 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.134593] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1972.134833] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24a0e8e9-15c6-4497-8c87-dcb5fb3caa93 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.232483] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1972.232743] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1972.232865] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 
tempest-ServerActionsTestOtherB-567149294-project-member] Deleting the datastore file [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1972.233147] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c903221c-a870-4dd4-b968-772be726d0a2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.240055] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1972.240055] env[62820]: value = "task-1696690" [ 1972.240055] env[62820]: _type = "Task" [ 1972.240055] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.248252] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696690, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.319330] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.319626] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.319759] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.751904] env[62820]: DEBUG oslo_vmware.api [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696690, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162944} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.752246] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1972.752362] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1972.752525] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1972.774017] env[62820]: INFO nova.scheduler.client.report [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted allocations for instance 72cdf2b2-fb69-4820-a663-56bfe92572d2 [ 1973.278932] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.279238] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.279482] env[62820]: DEBUG nova.objects.instance [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'resources' on Instance uuid 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1973.352670] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.353041] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.353041] env[62820]: DEBUG nova.network.neutron [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 
tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.783095] env[62820]: DEBUG nova.objects.instance [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'numa_topology' on Instance uuid 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1974.049167] env[62820]: DEBUG nova.compute.manager [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1974.049167] env[62820]: DEBUG nova.compute.manager [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing instance network info cache due to event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1974.049167] env[62820]: DEBUG oslo_concurrency.lockutils [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.049167] env[62820]: DEBUG oslo_concurrency.lockutils [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.049167] env[62820]: DEBUG nova.network.neutron [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1974.091657] env[62820]: DEBUG nova.network.neutron [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.285080] env[62820]: DEBUG nova.objects.base [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Object Instance<72cdf2b2-fb69-4820-a663-56bfe92572d2> lazy-loaded attributes: resources,numa_topology {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1974.395804] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289417ab-234e-4789-90a3-3a1fd8c612aa {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.405030] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab051660-e5c9-41de-9744-49ec8aa11edb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.437106] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcc6d3f-8d67-4a68-8a18-b03fe16f3dd2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.448578] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8391d6b0-d533-4a12-9a34-2ff314e8c00c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.463061] env[62820]: DEBUG nova.compute.provider_tree [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1974.593962] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.751814] env[62820]: DEBUG nova.network.neutron [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updated VIF entry in instance network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1974.752205] env[62820]: DEBUG nova.network.neutron [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": null, "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap24ebb702-61", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.946061] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.966174] env[62820]: DEBUG nova.scheduler.client.report [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1975.104983] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444dee51-1a89-4a81-ad3d-e1598432d208 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.113766] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9e04cb-3c7f-4fbf-8254-39142ce5ee95 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.255207] env[62820]: DEBUG oslo_concurrency.lockutils [req-af20586e-100d-425a-80a0-b6d5cde12aa0 req-608c6235-7a0e-4593-a3b5-1eb201c21e82 service nova] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.471899] env[62820]: 
DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.192s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.981214] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0cb81538-00b1-4ace-86de-189f1805ff24 tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.807s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.982037] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.036s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.982244] env[62820]: INFO nova.compute.manager [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Unshelving [ 1976.209079] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba371d5-10ea-4fb1-be8a-2c148747bd82 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.229230] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee4f43a-5627-4d0f-8cdb-e9780f4d97c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.236043] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance '09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 83 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1976.742717] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1976.743242] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd72098d-4dc1-4fc8-9407-f432ffe9ef3d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.751501] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1976.751501] env[62820]: value = "task-1696691" [ 1976.751501] env[62820]: _type = "Task" [ 1976.751501] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.759290] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.005242] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.005540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.005773] env[62820]: DEBUG nova.objects.instance [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'pci_requests' on Instance uuid 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1977.261924] env[62820]: DEBUG oslo_vmware.api [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696691, 'name': PowerOnVM_Task, 'duration_secs': 0.349115} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.262178] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1977.262367] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-b44e68f5-67e1-4a1b-93a5-223b55550ad3 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance '09d7f053-df0e-428a-98a4-a18d70c0158e' progress to 100 {{(pid=62820) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1977.510158] env[62820]: DEBUG nova.objects.instance [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'numa_topology' on Instance uuid 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1978.013074] env[62820]: INFO nova.compute.claims [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1978.224183] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.224456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.224671] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.224854] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.225038] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 
tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.227034] env[62820]: INFO nova.compute.manager [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Terminating instance [ 1978.730545] env[62820]: DEBUG nova.compute.manager [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1978.730775] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1978.731703] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc966dbf-181e-4cd4-82c9-ef8323d9858b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.739468] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1978.739688] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3777ec43-a339-48a1-99ac-9125e5e8b7f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.745982] env[62820]: DEBUG oslo_vmware.api [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1978.745982] env[62820]: value = "task-1696692" [ 1978.745982] env[62820]: _type = "Task" [ 1978.745982] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.753636] env[62820]: DEBUG oslo_vmware.api [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696692, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.128292] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6b60aa-9ffc-4f63-8f13-c164718e0902 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.136685] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ac1964-912f-451f-9068-78cf50ab739a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.166652] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2795a095-b1fd-4d30-b55b-f2609d1a6365 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.174608] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5335bb47-807c-4912-bbf7-8e398fe8e938 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.189676] env[62820]: DEBUG nova.compute.provider_tree [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.256312] env[62820]: DEBUG oslo_vmware.api [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696692, 'name': PowerOffVM_Task, 'duration_secs': 0.182022} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.256660] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1979.256916] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1979.257171] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1429f54c-17b0-4469-b9cc-4d567033304d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.343316] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1979.343971] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1979.344226] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleting the datastore file [datastore1] 2aeeb809-0b27-411b-b632-ef4d61b295df {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1979.344500] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17e194a3-5330-4927-9166-9044140fbf66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.351831] env[62820]: DEBUG oslo_vmware.api [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for the task: (returnval){ [ 1979.351831] env[62820]: value = "task-1696694" [ 1979.351831] env[62820]: _type = "Task" [ 1979.351831] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.360207] env[62820]: DEBUG oslo_vmware.api [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696694, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.457126] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "09d7f053-df0e-428a-98a4-a18d70c0158e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.457557] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.457768] env[62820]: DEBUG nova.compute.manager [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Going to confirm migration 10 {{(pid=62820) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5127}} [ 1979.693161] env[62820]: DEBUG nova.scheduler.client.report [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1979.861469] env[62820]: DEBUG oslo_vmware.api [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Task: {'id': task-1696694, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136305} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.861727] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1979.861984] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1979.862098] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1979.862272] env[62820]: INFO nova.compute.manager [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1979.862514] env[62820]: DEBUG oslo.service.loopingcall [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1979.862702] env[62820]: DEBUG nova.compute.manager [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1979.862797] env[62820]: DEBUG nova.network.neutron [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1980.006655] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.006841] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquired lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.007036] env[62820]: DEBUG nova.network.neutron [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1980.007227] env[62820]: DEBUG nova.objects.instance [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'info_cache' on Instance uuid 09d7f053-df0e-428a-98a4-a18d70c0158e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.198783] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.193s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.252601] env[62820]: INFO nova.network.neutron [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1980.531176] env[62820]: DEBUG nova.compute.manager [req-b9c1f99c-dd19-48f2-8139-9c09c3b9bebf req-b7cd1a19-aa60-4fb8-9ed6-ae20c336a7a8 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Received event network-vif-deleted-7f7affc8-f587-4484-9eef-211d6ea80226 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1980.531403] env[62820]: INFO nova.compute.manager [req-b9c1f99c-dd19-48f2-8139-9c09c3b9bebf req-b7cd1a19-aa60-4fb8-9ed6-ae20c336a7a8 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Neutron deleted interface 7f7affc8-f587-4484-9eef-211d6ea80226; detaching it from the instance and deleting it from the info cache [ 1980.531576] 
env[62820]: DEBUG nova.network.neutron [req-b9c1f99c-dd19-48f2-8139-9c09c3b9bebf req-b7cd1a19-aa60-4fb8-9ed6-ae20c336a7a8 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.997496] env[62820]: DEBUG nova.network.neutron [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.034763] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b9788b1-8682-4786-a6fa-19dc3c6f031a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.045235] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b396267d-5d52-42f4-bede-a26ae107822b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.080017] env[62820]: DEBUG nova.compute.manager [req-b9c1f99c-dd19-48f2-8139-9c09c3b9bebf req-b7cd1a19-aa60-4fb8-9ed6-ae20c336a7a8 service nova] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Detach interface failed, port_id=7f7affc8-f587-4484-9eef-211d6ea80226, reason: Instance 2aeeb809-0b27-411b-b632-ef4d61b295df could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1981.284321] env[62820]: DEBUG nova.network.neutron [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [{"id": "32f96b87-6a60-4c4f-877b-3ab110787004", "address": "fa:16:3e:d8:a8:4f", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f96b87-6a", "ovs_interfaceid": "32f96b87-6a60-4c4f-877b-3ab110787004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.500131] env[62820]: INFO nova.compute.manager [-] [instance: 2aeeb809-0b27-411b-b632-ef4d61b295df] Took 1.64 seconds to deallocate network for instance. 
[ 1981.788927] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Releasing lock "refresh_cache-09d7f053-df0e-428a-98a4-a18d70c0158e" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.789213] env[62820]: DEBUG nova.objects.instance [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'migration_context' on Instance uuid 09d7f053-df0e-428a-98a4-a18d70c0158e {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1982.005993] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.006355] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.006613] env[62820]: DEBUG nova.objects.instance [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lazy-loading 'resources' on Instance uuid 2aeeb809-0b27-411b-b632-ef4d61b295df {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1982.291886] env[62820]: DEBUG nova.objects.base [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Object Instance<09d7f053-df0e-428a-98a4-a18d70c0158e> lazy-loaded attributes: info_cache,migration_context {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1982.292982] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b53d285-8dd3-4a0f-9a11-e23e9f71d18e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.313565] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ad5ac8-6e5d-432a-80e9-8da56ee6c35d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.319487] env[62820]: DEBUG oslo_vmware.api [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 1982.319487] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d81568-1691-0b0d-9b80-3987d2d7c793" [ 1982.319487] env[62820]: _type = "Task" [ 1982.319487] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.327080] env[62820]: DEBUG oslo_vmware.api [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d81568-1691-0b0d-9b80-3987d2d7c793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.431295] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.431491] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.431676] env[62820]: DEBUG nova.network.neutron [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1982.559688] env[62820]: DEBUG nova.compute.manager [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-vif-plugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1982.559914] env[62820]: DEBUG oslo_concurrency.lockutils [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.560349] env[62820]: DEBUG oslo_concurrency.lockutils [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.560535] env[62820]: DEBUG oslo_concurrency.lockutils [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.560709] env[62820]: DEBUG nova.compute.manager [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] No waiting events found dispatching network-vif-plugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 
{{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1982.560877] env[62820]: WARNING nova.compute.manager [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received unexpected event network-vif-plugged-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 for instance with vm_state shelved_offloaded and task_state spawning. [ 1982.561061] env[62820]: DEBUG nova.compute.manager [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1982.561323] env[62820]: DEBUG nova.compute.manager [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing instance network info cache due to event network-changed-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 1982.561386] env[62820]: DEBUG oslo_concurrency.lockutils [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] Acquiring lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.621137] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897c03a9-16dc-4733-8432-5a38cf0deb81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.628257] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fb081d-1c51-419e-b2ed-d783498dfd9c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.658863] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfb3bde-52c4-4bc6-a207-0bf3f93bfe88 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.665994] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52394f42-4994-4e11-ad24-7e675e6d8c15 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.679308] env[62820]: DEBUG nova.compute.provider_tree [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1982.830616] env[62820]: DEBUG oslo_vmware.api [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d81568-1691-0b0d-9b80-3987d2d7c793, 'name': SearchDatastore_Task, 'duration_secs': 0.008775} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.830903] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.135554] env[62820]: DEBUG nova.network.neutron [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.182487] env[62820]: DEBUG nova.scheduler.client.report [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1983.638382] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.640786] env[62820]: DEBUG oslo_concurrency.lockutils [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] Acquired lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" 
{{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.640989] env[62820]: DEBUG nova.network.neutron [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Refreshing network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1983.665509] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='60f41c9af33b3f13c85b365182c1a26d',container_format='bare',created_at=2024-12-10T16:58:51Z,direct_url=,disk_format='vmdk',id=fd800fb1-435f-4a8d-a070-d66c5f9af240,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-171927475-shelved',owner='7fef128f5c704730b335b62f6cce0416',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-12-10T16:59:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1983.665722] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1983.665869] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1983.666065] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1983.666219] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1983.666367] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1983.666573] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1983.666729] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1983.666893] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1983.667074] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1983.667254] env[62820]: DEBUG nova.virt.hardware [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1983.668328] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d6c5eb-10a2-498c-bc87-6922150e04c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.676775] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a450f378-c8d2-4ff0-b990-42b4733a936c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.690543] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.692602] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:49:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1983.699858] env[62820]: DEBUG oslo.service.loopingcall [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1983.700316] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.869s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.701433] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1983.701801] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f80e746-95f2-4e47-9a57-df4653778bf2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.719049] env[62820]: INFO nova.scheduler.client.report [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Deleted allocations for instance 2aeeb809-0b27-411b-b632-ef4d61b295df [ 1983.724591] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1983.724591] env[62820]: value = "task-1696695" [ 1983.724591] env[62820]: _type = "Task" [ 1983.724591] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.734139] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696695, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.229079] env[62820]: DEBUG oslo_concurrency.lockutils [None req-afc2711d-8152-4b57-862f-9ae985d32459 tempest-AttachVolumeShelveTestJSON-857513570 tempest-AttachVolumeShelveTestJSON-857513570-project-member] Lock "2aeeb809-0b27-411b-b632-ef4d61b295df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.004s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.238321] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696695, 'name': CreateVM_Task, 'duration_secs': 0.313617} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.238485] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1984.239142] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.239337] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.239696] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1984.239981] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4075b7a-b2d6-479f-8d54-96f8e22abf81 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.246753] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1984.246753] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52641ca3-9611-6694-cf1b-0938ae35598a" [ 1984.246753] env[62820]: _type = "Task" [ 1984.246753] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.255363] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52641ca3-9611-6694-cf1b-0938ae35598a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.316464] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5911dcef-85eb-4736-8f08-c562b3d5189f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.325074] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7afa5de-4a7d-44af-abcf-133775858e7d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.359251] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6d89fc-9446-4cd7-b0a7-665dda8332ae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.367269] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bdb5a1-c66b-426d-9ea0-4812b083bcbe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.380591] env[62820]: DEBUG nova.compute.provider_tree [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1984.421622] env[62820]: DEBUG nova.network.neutron [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updated VIF entry in instance network info cache for port 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1984.422046] env[62820]: DEBUG nova.network.neutron [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [{"id": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "address": "fa:16:3e:1f:49:02", "network": {"id": "b1204c58-eb4a-47fb-bfc0-08aaa2790d87", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-323854531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fef128f5c704730b335b62f6cce0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ebb702-61", "ovs_interfaceid": "24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.757216] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.758019] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Processing image fd800fb1-435f-4a8d-a070-d66c5f9af240 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1984.758432] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.758605] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.758795] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 
tempest-ServerActionsTestOtherB-567149294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1984.759059] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eaf980b4-775b-42ea-a5e3-06ba358a3d05 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.767836] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1984.768087] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1984.768796] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2d5eeea-eda6-4521-8a53-aa19dd2e0a6f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.774210] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1984.774210] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52d8df88-0a31-7b3d-4833-c797f5223df2" [ 1984.774210] env[62820]: _type = "Task" [ 1984.774210] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.782087] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52d8df88-0a31-7b3d-4833-c797f5223df2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.884642] env[62820]: DEBUG nova.scheduler.client.report [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1984.925156] env[62820]: DEBUG oslo_concurrency.lockutils [req-d97f564a-48b3-43b9-b657-33401df17256 req-adab5b59-efbf-4228-bff4-076256077d7f service nova] Releasing lock "refresh_cache-72cdf2b2-fb69-4820-a663-56bfe92572d2" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.284486] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Preparing fetch location {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1985.284717] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Fetch image to [datastore1] OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30/OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30.vmdk {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1985.284875] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Downloading stream optimized image fd800fb1-435f-4a8d-a070-d66c5f9af240 to [datastore1] OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30/OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30.vmdk on the data store datastore1 as vApp {{(pid=62820) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1985.285058] env[62820]: DEBUG nova.virt.vmwareapi.images [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Downloading image file data fd800fb1-435f-4a8d-a070-d66c5f9af240 to the ESX as VM named 'OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30' {{(pid=62820) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1985.357140] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1985.357140] env[62820]: value = "resgroup-9" [ 1985.357140] env[62820]: _type = "ResourcePool" [ 1985.357140] env[62820]: }. 
{{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1985.357454] env[62820]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-322f970a-95c7-4292-8dea-df5d8127fc64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.378651] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease: (returnval){ [ 1985.378651] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1985.378651] env[62820]: _type = "HttpNfcLease" [ 1985.378651] env[62820]: } obtained for vApp import into resource pool (val){ [ 1985.378651] env[62820]: value = "resgroup-9" [ 1985.378651] env[62820]: _type = "ResourcePool" [ 1985.378651] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1985.379182] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the lease: (returnval){ [ 1985.379182] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1985.379182] env[62820]: _type = "HttpNfcLease" [ 1985.379182] env[62820]: } to be ready. {{(pid=62820) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1985.386327] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1985.386327] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1985.386327] env[62820]: _type = "HttpNfcLease" [ 1985.386327] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1985.737559] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.738336] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.888325] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1985.888325] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1985.888325] env[62820]: _type = "HttpNfcLease" [ 1985.888325] env[62820]: } is initializing. 
{{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1985.893590] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.193s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.241158] env[62820]: DEBUG nova.compute.utils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1986.388434] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1986.388434] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1986.388434] env[62820]: _type = "HttpNfcLease" [ 1986.388434] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1986.451340] env[62820]: INFO nova.scheduler.client.report [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted allocation for migration 557b40e4-0e79-4be3-b950-3c4303004af0 [ 1986.744456] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.889388] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1986.889388] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1986.889388] env[62820]: _type = "HttpNfcLease" [ 1986.889388] env[62820]: } is initializing. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1986.956816] env[62820]: DEBUG oslo_concurrency.lockutils [None req-50d03180-286e-47f8-b012-53fdf17f41b5 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.499s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.024035] env[62820]: INFO nova.compute.manager [None req-c8a1d857-639b-4b1b-ae39-9ec5d8ee8099 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Get console output [ 1987.024439] env[62820]: WARNING nova.virt.vmwareapi.driver [None req-c8a1d857-639b-4b1b-ae39-9ec5d8ee8099 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] The console log is missing. 
Check your VSPC configuration [ 1987.390480] env[62820]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1987.390480] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1987.390480] env[62820]: _type = "HttpNfcLease" [ 1987.390480] env[62820]: } is ready. {{(pid=62820) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1987.390867] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1987.390867] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]522425ef-43c5-d11f-b08e-45588cf8d186" [ 1987.390867] env[62820]: _type = "HttpNfcLease" [ 1987.390867] env[62820]: }. {{(pid=62820) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1987.391539] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5da9c0-6e41-4c46-8313-1988e02cab33 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.398436] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d24f1c-ee2a-ce49-2443-53d2fbb32fa7/disk-0.vmdk from lease info. {{(pid=62820) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1987.398578] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d24f1c-ee2a-ce49-2443-53d2fbb32fa7/disk-0.vmdk. 
{{(pid=62820) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1987.462282] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b19119f9-21ce-43db-9752-ff2e8dc9d214 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.797200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.797449] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.797691] env[62820]: INFO nova.compute.manager [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Attaching volume 46145c46-a0cb-404f-9960-262b06b6116d to /dev/sdb [ 1987.834228] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412cc034-81e7-4b58-a344-204a731612cb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.845639] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8ead7d-47e6-4c86-9e73-11337fbaa9c6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.859224] env[62820]: DEBUG nova.virt.block_device [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updating existing volume attachment record: e4a211a5-8cb9-4414-9eab-2d1fc689131d {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1988.567349] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Completed reading data from the image iterator. {{(pid=62820) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1988.567716] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d24f1c-ee2a-ce49-2443-53d2fbb32fa7/disk-0.vmdk. 
{{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1988.568884] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8318c2a3-e33e-4bb8-9ac1-37933225667c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.576792] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d24f1c-ee2a-ce49-2443-53d2fbb32fa7/disk-0.vmdk is in state: ready. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1988.577009] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d24f1c-ee2a-ce49-2443-53d2fbb32fa7/disk-0.vmdk. {{(pid=62820) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1988.577279] env[62820]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e3936f4d-b0af-40a2-affb-58aa2e794466 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.741911] env[62820]: DEBUG oslo_vmware.rw_handles [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d24f1c-ee2a-ce49-2443-53d2fbb32fa7/disk-0.vmdk. 
{{(pid=62820) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1988.742255] env[62820]: INFO nova.virt.vmwareapi.images [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Downloaded image file data fd800fb1-435f-4a8d-a070-d66c5f9af240 [ 1988.743278] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdba6c4e-824a-4227-a222-399a0d5052c8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.758605] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1222e54b-0376-42d9-91a5-b08ac15c6f7b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.792766] env[62820]: INFO nova.virt.vmwareapi.images [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] The imported VM was unregistered [ 1988.795151] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Caching image {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1988.795399] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Creating directory with path [datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1988.795660] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-791eb780-94bc-4d0f-8cce-8a518b1bbfae {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.805462] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Created directory with path [datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240 {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1988.805638] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30/OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30.vmdk to [datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk. 
{{(pid=62820) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1988.805862] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c8a0b93f-6ba4-48e5-a117-4114c1a9f145 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.811891] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1988.811891] env[62820]: value = "task-1696700" [ 1988.811891] env[62820]: _type = "Task" [ 1988.811891] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.818937] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696700, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.323747] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696700, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.825668] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696700, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.328050] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696700, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.831846] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696700, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.325784] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696700, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.462679} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.326466] env[62820]: INFO nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30/OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30.vmdk to [datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk. 
[ 1991.326685] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Cleaning up location [datastore1] OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1991.326851] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_8bfbad04-3dfa-4b21-9350-50e911357f30 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1991.327137] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-506e14b2-d6ba-48d1-82a9-54d435ea0bdf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.334497] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1991.334497] env[62820]: value = "task-1696702" [ 1991.334497] env[62820]: _type = "Task" [ 1991.334497] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.342979] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696702, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.794421] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.794703] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.794938] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "901626d2-1788-4017-b0c7-52537618804c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.795159] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.795336] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.797588] env[62820]: INFO nova.compute.manager [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Terminating instance [ 1991.844672] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041546} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.845046] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1991.845131] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.845317] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk to [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1991.845562] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3fafc97-4fb1-4ec3-a18a-4af90bf75674 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.852377] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1991.852377] env[62820]: value = "task-1696703" [ 1991.852377] env[62820]: _type = "Task" [ 1991.852377] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.860009] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696703, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.301949] env[62820]: DEBUG nova.compute.manager [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1992.302189] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1992.303185] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74921935-acbb-4353-b0d2-4e06adc2bb67 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.312942] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1992.313242] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61a07d16-5b40-4ea9-a4b7-9facfa8f7e69 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.321671] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1992.321671] env[62820]: value = "task-1696704" [ 1992.321671] env[62820]: _type = "Task" [ 1992.321671] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.333843] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696704, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.364186] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696703, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.405352] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1992.405666] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353714', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'name': 'volume-46145c46-a0cb-404f-9960-262b06b6116d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c33b3040-b93e-43f7-ab00-e29e8a307d0b', 'attached_at': '', 'detached_at': '', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'serial': '46145c46-a0cb-404f-9960-262b06b6116d'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1992.406756] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778d8a26-3373-4937-b389-7d33dd3e15eb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.424772] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4752d4-4ac4-4562-ade2-63e965350c10 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.455066] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-46145c46-a0cb-404f-9960-262b06b6116d/volume-46145c46-a0cb-404f-9960-262b06b6116d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1992.455424] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7794b48d-ace5-4059-98e9-e58be28531d3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.476598] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1992.476598] env[62820]: value = "task-1696705" [ 1992.476598] env[62820]: _type = "Task" [ 1992.476598] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.488426] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696705, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.833419] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696704, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.866619] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696703, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.989759] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696705, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.336019] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696704, 'name': PowerOffVM_Task, 'duration_secs': 0.993153} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.336450] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1993.336724] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1993.337023] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32a8709d-b1ea-4d2f-9002-148561b84adc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.364511] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696703, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.489928] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696705, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.826590] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1993.827431] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1993.827533] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Deleting the datastore file [datastore1] 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1993.827786] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-410e6626-a253-488c-bf1b-fa6780181524 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.836797] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 1993.836797] env[62820]: value = "task-1696707" [ 1993.836797] env[62820]: _type = "Task" [ 1993.836797] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.847711] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696707, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.867075] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696703, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.991549] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696705, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.347149] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696707, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.364438] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696703, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.225797} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.364693] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fd800fb1-435f-4a8d-a070-d66c5f9af240/fd800fb1-435f-4a8d-a070-d66c5f9af240.vmdk to [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1994.365460] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6131ef3-5c04-4159-9d57-0c44ec38998b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.386560] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1994.386809] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c4b431f-b37f-43ea-a480-5bd17645e313 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.406389] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1994.406389] env[62820]: value = "task-1696708" [ 1994.406389] env[62820]: _type = "Task" [ 1994.406389] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.413954] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696708, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.489315] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696705, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.847460] env[62820]: DEBUG oslo_vmware.api [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696707, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.671894} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.847714] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1994.847895] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1994.848091] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1994.848264] env[62820]: INFO nova.compute.manager [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 901626d2-1788-4017-b0c7-52537618804c] Took 2.55 seconds to destroy the instance on the hypervisor. [ 1994.848512] env[62820]: DEBUG oslo.service.loopingcall [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1994.848700] env[62820]: DEBUG nova.compute.manager [-] [instance: 901626d2-1788-4017-b0c7-52537618804c] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1994.848805] env[62820]: DEBUG nova.network.neutron [-] [instance: 901626d2-1788-4017-b0c7-52537618804c] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1994.916975] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696708, 'name': ReconfigVM_Task, 'duration_secs': 0.315146} completed successfully. 
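The loopingcall entry above waits on a "deallocate network with retries" helper once the instance has been destroyed on the hypervisor. A rough sketch of such a retry wrapper, purely illustrative and not the oslo.service or Nova implementation:

import time

def call_with_retries(fn, attempts=3, delay=1.0):
    """Call fn, retrying on exception up to `attempts` times."""
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception as exc:
            if attempt == attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {delay}s")
            time.sleep(delay)

# Example: a flaky deallocation that succeeds on the second try.
calls = {'n': 0}
def deallocate():
    calls['n'] += 1
    if calls['n'] < 2:
        raise RuntimeError('neutron timeout')
    return 'deallocated'

print(call_with_retries(deallocate, delay=0.01))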
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.917301] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2/72cdf2b2-fb69-4820-a663-56bfe92572d2.vmdk or device None with type streamOptimized {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1994.917875] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc39a3ce-9339-477d-a143-ea98c4777de1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.925201] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1994.925201] env[62820]: value = "task-1696709" [ 1994.925201] env[62820]: _type = "Task" [ 1994.925201] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.932770] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696709, 'name': Rename_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.989944] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696705, 'name': ReconfigVM_Task, 'duration_secs': 2.066124} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.990259] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-46145c46-a0cb-404f-9960-262b06b6116d/volume-46145c46-a0cb-404f-9960-262b06b6116d.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1994.994867] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8761ff1d-0c46-401b-bb25-901ac28dcc8c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.009777] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1995.009777] env[62820]: value = "task-1696710" [ 1995.009777] env[62820]: _type = "Task" [ 1995.009777] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.017533] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696710, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.272606] env[62820]: DEBUG nova.compute.manager [req-17b22820-97c9-45fa-a143-446016d4df2a req-abbcbaf3-46a7-483c-9018-63bfcdfe7199 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Received event network-vif-deleted-d3537ab9-0a82-437a-83c1-ffb18a60490a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1995.272845] env[62820]: INFO nova.compute.manager [req-17b22820-97c9-45fa-a143-446016d4df2a req-abbcbaf3-46a7-483c-9018-63bfcdfe7199 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Neutron deleted interface d3537ab9-0a82-437a-83c1-ffb18a60490a; detaching it from the instance and deleting it from the info cache [ 1995.272982] env[62820]: DEBUG nova.network.neutron [req-17b22820-97c9-45fa-a143-446016d4df2a req-abbcbaf3-46a7-483c-9018-63bfcdfe7199 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.435886] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696709, 'name': Rename_Task, 'duration_secs': 0.139615} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.436394] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1995.436813] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e391e703-b19d-4e5b-a1d9-ad82db8a5299 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.444297] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1995.444297] env[62820]: value = "task-1696711" [ 1995.444297] env[62820]: _type = "Task" [ 1995.444297] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.452750] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696711, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.519676] env[62820]: DEBUG oslo_vmware.api [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696710, 'name': ReconfigVM_Task, 'duration_secs': 0.143383} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.519676] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353714', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'name': 'volume-46145c46-a0cb-404f-9960-262b06b6116d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c33b3040-b93e-43f7-ab00-e29e8a307d0b', 'attached_at': '', 'detached_at': '', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'serial': '46145c46-a0cb-404f-9960-262b06b6116d'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1995.752492] env[62820]: DEBUG nova.network.neutron [-] [instance: 901626d2-1788-4017-b0c7-52537618804c] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.775357] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70a10d10-5d78-4590-8124-04b8b8887c27 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.784512] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d7a074-fa60-4ff6-9180-85c6d1ad9a44 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.812672] env[62820]: DEBUG nova.compute.manager [req-17b22820-97c9-45fa-a143-446016d4df2a req-abbcbaf3-46a7-483c-9018-63bfcdfe7199 service nova] [instance: 901626d2-1788-4017-b0c7-52537618804c] Detach interface failed, port_id=d3537ab9-0a82-437a-83c1-ffb18a60490a, reason: Instance 901626d2-1788-4017-b0c7-52537618804c could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 1995.956537] env[62820]: DEBUG oslo_vmware.api [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696711, 'name': PowerOnVM_Task, 'duration_secs': 0.451946} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.957028] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1996.064780] env[62820]: DEBUG nova.compute.manager [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1996.065728] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3634f21-3ae3-43da-9579-0420a9565726 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.255203] env[62820]: INFO nova.compute.manager [-] [instance: 901626d2-1788-4017-b0c7-52537618804c] Took 1.41 seconds to deallocate network for instance. [ 1996.558055] env[62820]: DEBUG nova.objects.instance [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'flavor' on Instance uuid c33b3040-b93e-43f7-ab00-e29e8a307d0b {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1996.585024] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0497417c-8fc4-4355-94ab-7ba12139c1fa tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.600s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.761835] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.762136] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.762362] env[62820]: DEBUG nova.objects.instance [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'resources' on Instance uuid 901626d2-1788-4017-b0c7-52537618804c {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1997.063401] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7f721cf9-2c56-4680-9f74-5d120291b3ff tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" "released" by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.266s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.146392] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.146601] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.146813] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.146992] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.147173] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.149057] env[62820]: INFO nova.compute.manager [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Terminating instance [ 1997.325141] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.325393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" acquired by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.356044] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4356a1-86d1-461d-9e9d-19814a877bd8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.364352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8cdb58-0f06-4c84-9f70-5e8a2c105d76 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.394594] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0517dc1-1de6-4130-ae0e-a792f4162a52 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.401505] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b222426-d264-46cc-a676-47983aec5be7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.414454] env[62820]: DEBUG nova.compute.provider_tree [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1997.652295] env[62820]: DEBUG nova.compute.manager [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1997.652486] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1997.653415] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb5906a-aa57-424c-a964-9a00f95bb404 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.661583] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1997.661811] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82eb0787-1cd3-4399-81c1-745cfa43425e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.668052] env[62820]: DEBUG oslo_vmware.api [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1997.668052] env[62820]: value = "task-1696712" [ 1997.668052] env[62820]: _type = "Task" [ 1997.668052] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.675323] env[62820]: DEBUG oslo_vmware.api [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696712, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.828496] env[62820]: INFO nova.compute.manager [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Detaching volume 46145c46-a0cb-404f-9960-262b06b6116d [ 1997.864940] env[62820]: INFO nova.virt.block_device [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Attempting to driver detach volume 46145c46-a0cb-404f-9960-262b06b6116d from mountpoint /dev/sdb [ 1997.864940] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1997.865091] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353714', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'name': 'volume-46145c46-a0cb-404f-9960-262b06b6116d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c33b3040-b93e-43f7-ab00-e29e8a307d0b', 'attached_at': '', 'detached_at': '', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'serial': '46145c46-a0cb-404f-9960-262b06b6116d'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1997.865952] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebf60dc-9081-4727-b9aa-a29cc6ed48a1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.887630] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c73c1d-b6dc-41b8-a2fa-dd504761ff32 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.894549] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766fbc57-3aba-47ad-b98f-c17794bcd13a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.914571] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4602340-cf5a-469c-9616-47ef91cc416d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.917328] env[62820]: DEBUG nova.scheduler.client.report [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1997.932650] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] The volume has not been displaced from its original location: [datastore1] volume-46145c46-a0cb-404f-9960-262b06b6116d/volume-46145c46-a0cb-404f-9960-262b06b6116d.vmdk. No consolidation needed. 
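The report-client entry above logs the compute node's resource-provider inventory. Placement-style usable capacity is commonly (total - reserved) * allocation_ratio; checking that arithmetic against the logged values:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400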
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1997.938073] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1997.939106] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f2ad1ec-1e16-4450-88ae-1fa5284bb058 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.957151] env[62820]: DEBUG oslo_vmware.api [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1997.957151] env[62820]: value = "task-1696713" [ 1997.957151] env[62820]: _type = "Task" [ 1997.957151] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.965549] env[62820]: DEBUG oslo_vmware.api [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696713, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.177340] env[62820]: DEBUG oslo_vmware.api [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696712, 'name': PowerOffVM_Task, 'duration_secs': 0.181342} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.177626] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1998.177792] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1998.178052] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23c29cba-5c2b-45d6-9958-75eb1e869518 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.388211] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1998.388448] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1998.388635] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleting the datastore file [datastore1] 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1998.388904] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-924cef4a-6dea-471e-86b3-de2510d98367 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.395538] env[62820]: DEBUG oslo_vmware.api [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for the task: (returnval){ [ 1998.395538] env[62820]: value = "task-1696715" [ 1998.395538] env[62820]: _type = "Task" [ 1998.395538] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.402798] env[62820]: DEBUG oslo_vmware.api [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696715, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.422650] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.443388] env[62820]: INFO nova.scheduler.client.report [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Deleted allocations for instance 901626d2-1788-4017-b0c7-52537618804c [ 1998.466595] env[62820]: DEBUG oslo_vmware.api [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696713, 'name': ReconfigVM_Task, 'duration_secs': 0.222805} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.466849] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1998.471734] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df008574-7973-45a9-8816-9f3194224a20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.486747] env[62820]: DEBUG oslo_vmware.api [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 1998.486747] env[62820]: value = "task-1696716" [ 1998.486747] env[62820]: _type = "Task" [ 1998.486747] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.496602] env[62820]: DEBUG oslo_vmware.api [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696716, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.905527] env[62820]: DEBUG oslo_vmware.api [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Task: {'id': task-1696715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132727} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.905785] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.905973] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.906158] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.906330] env[62820]: INFO nova.compute.manager [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1998.906570] env[62820]: DEBUG oslo.service.loopingcall [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.906776] env[62820]: DEBUG nova.compute.manager [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1998.906873] env[62820]: DEBUG nova.network.neutron [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1998.955056] env[62820]: DEBUG oslo_concurrency.lockutils [None req-a432580e-8651-487b-a4e5-f7d717b06f54 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "901626d2-1788-4017-b0c7-52537618804c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.160s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.997136] env[62820]: DEBUG oslo_vmware.api [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696716, 'name': ReconfigVM_Task, 'duration_secs': 0.134963} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.997440] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353714', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'name': 'volume-46145c46-a0cb-404f-9960-262b06b6116d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c33b3040-b93e-43f7-ab00-e29e8a307d0b', 'attached_at': '', 'detached_at': '', 'volume_id': '46145c46-a0cb-404f-9960-262b06b6116d', 'serial': '46145c46-a0cb-404f-9960-262b06b6116d'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1999.367951] env[62820]: DEBUG nova.compute.manager [req-fd6c8884-0f1d-46f5-803f-3404e77acde3 req-c3842725-bf4e-438e-8215-72c6d32f5d45 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Received event network-vif-deleted-24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 1999.368263] env[62820]: INFO nova.compute.manager [req-fd6c8884-0f1d-46f5-803f-3404e77acde3 req-c3842725-bf4e-438e-8215-72c6d32f5d45 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Neutron deleted interface 24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3; detaching it from the instance and deleting it from the info cache [ 1999.370255] env[62820]: DEBUG nova.network.neutron [req-fd6c8884-0f1d-46f5-803f-3404e77acde3 req-c3842725-bf4e-438e-8215-72c6d32f5d45 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.559614] env[62820]: DEBUG nova.objects.instance [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'flavor' on Instance uuid c33b3040-b93e-43f7-ab00-e29e8a307d0b {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.841385] env[62820]: DEBUG nova.network.neutron [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.871450] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-deef1ca8-8a5a-4c77-b03f-3b5a8f273f58 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.881356] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad1f7d0-c3b2-4836-a34d-6d2f4c00836f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.910327] env[62820]: DEBUG nova.compute.manager [req-fd6c8884-0f1d-46f5-803f-3404e77acde3 req-c3842725-bf4e-438e-8215-72c6d32f5d45 service nova] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Detach interface failed, port_id=24ebb702-6105-4cd7-ae6b-f95fc7dbc7e3, reason: Instance 72cdf2b2-fb69-4820-a663-56bfe92572d2 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 2000.344071] env[62820]: INFO nova.compute.manager [-] [instance: 72cdf2b2-fb69-4820-a663-56bfe92572d2] Took 1.44 seconds to deallocate network for instance. [ 2000.571609] env[62820]: DEBUG oslo_concurrency.lockutils [None req-b4041197-e906-46c3-9dd5-8b64b05efb70 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.245s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.851092] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.851092] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.851092] env[62820]: DEBUG nova.objects.instance [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lazy-loading 'resources' on Instance uuid 72cdf2b2-fb69-4820-a663-56bfe92572d2 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2001.086794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.086917] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.443452] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fa6e98-340a-4296-9449-2816cc1b3a9b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.452533] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2696c7c8-fc9f-4736-9897-45aeee698e8e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.481859] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-97fcc71d-14e4-48e5-a8ef-90f88b0db497 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.489654] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc15de04-7558-4a4d-a93b-8ac4bdd91a1c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.502784] env[62820]: DEBUG nova.compute.provider_tree [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.590020] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Starting instance... {{(pid=62820) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2001.601015] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.601296] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.601524] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.601714] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.601995] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.603971] env[62820]: INFO 
nova.compute.manager [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Terminating instance [ 2002.006518] env[62820]: DEBUG nova.scheduler.client.report [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2002.096997] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.097194] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.097367] env[62820]: DEBUG nova.compute.manager [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2002.100225] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb57165a-ffb8-49b2-9375-956bfb93f4c1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.107455] env[62820]: DEBUG nova.compute.manager [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Start destroying the instance on the hypervisor. 
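The lock entries above ("acquired ... waited 0.000s", "released ... held 3.245s") all follow the same pattern: every critical section is wrapped so that wait time and hold time are measured and reported. A minimal standalone sketch of that pattern, assuming a simple per-name lock registry; the decorator name and message format are illustrative, not the actual oslo_concurrency.lockutils implementation.

    import functools
    import threading
    import time

    _locks = {}                      # lock name -> threading.Lock (simplified registry)
    _registry_guard = threading.Lock()

    def _get_lock(name):
        # Reuse one Lock object per name, like a per-instance-UUID lock.
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())

    def timed_lock(name):
        """Run the wrapped function under the named lock and report how long
        the caller waited for it and how long it was held."""
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                lock = _get_lock(name)
                start = time.monotonic()
                with lock:
                    waited = time.monotonic() - start
                    print(f'Lock "{name}" acquired by "{func.__name__}" :: waited {waited:.3f}s')
                    held_start = time.monotonic()
                    try:
                        return func(*args, **kwargs)
                    finally:
                        held = time.monotonic() - held_start
                        print(f'Lock "{name}" released by "{func.__name__}" :: held {held:.3f}s')
            return wrapper
        return decorator

    @timed_lock("c33b3040-b93e-43f7-ab00-e29e8a307d0b")
    def do_terminate_instance():
        time.sleep(0.1)              # stand-in for the actual teardown work

    if __name__ == "__main__":
        do_terminate_instance()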
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2002.107653] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2002.107929] env[62820]: DEBUG nova.compute.manager [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62820) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2002.108431] env[62820]: DEBUG nova.objects.instance [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'flavor' on Instance uuid ab74220e-d9c4-4c96-a38d-9935dd3e13c0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.110036] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9543612b-edb4-4ef3-b32d-0753254a37e7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.114947] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.119149] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2002.119381] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3382c96b-3a34-4c59-8ee3-7b3afe7d9a47 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.124949] env[62820]: DEBUG oslo_vmware.api [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 2002.124949] env[62820]: value = "task-1696717" [ 2002.124949] env[62820]: _type = "Task" [ 2002.124949] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.132677] env[62820]: DEBUG oslo_vmware.api [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696717, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.511928] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.514209] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.399s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.515725] env[62820]: INFO nova.compute.claims [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2002.530378] env[62820]: INFO nova.scheduler.client.report [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Deleted allocations for instance 72cdf2b2-fb69-4820-a663-56bfe92572d2 [ 2002.635372] env[62820]: DEBUG oslo_vmware.api [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696717, 'name': PowerOffVM_Task, 'duration_secs': 0.196963} completed successfully. 
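The PowerOffVM_Task entries show the usual task lifecycle against vCenter: submit the task, then poll it ("progress is 0%" ... "completed successfully" with a duration_secs). A self-contained sketch of such a poll loop under the assumption of a simulated task object standing in for a real vCenter session; the FakeTask class and the polling interval are illustrative, not oslo.vmware internals.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle; progresses each time it is polled."""
        def __init__(self, name):
            self.name = name
            self.progress = 0

        def poll(self):
            self.progress = min(100, self.progress + 50)
            return {"state": "success" if self.progress == 100 else "running",
                    "progress": self.progress}

    def wait_for_task(task, interval=0.5):
        """Poll a task until it reaches a terminal state, mimicking the
        'progress is N%' / 'completed successfully' lines in the log."""
        start = time.monotonic()
        while True:
            info = task.poll()
            print(f"Task {task.name!r} progress is {info['progress']}%.")
            if info["state"] == "success":
                duration = time.monotonic() - start
                print(f"Task {task.name!r} completed successfully in {duration:.6f}s")
                return info
            if info["state"] == "error":
                raise RuntimeError(f"Task {task.name!r} failed")
            time.sleep(interval)

    if __name__ == "__main__":
        wait_for_task(FakeTask("PowerOffVM_Task"))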
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.635625] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2002.635790] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2002.636079] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db1f6919-ab53-431a-8c8f-08106f949a26 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.726475] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2002.726725] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2002.726914] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleting the datastore file [datastore1] c33b3040-b93e-43f7-ab00-e29e8a307d0b {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2002.727198] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58e57e7c-a6e7-4044-a403-577bb6bc7942 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.733418] env[62820]: DEBUG oslo_vmware.api [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for the task: (returnval){ [ 2002.733418] env[62820]: value = "task-1696719" [ 2002.733418] env[62820]: _type = "Task" [ 2002.733418] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.741440] env[62820]: DEBUG oslo_vmware.api [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696719, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.036511] env[62820]: DEBUG oslo_concurrency.lockutils [None req-25cc8c0e-2672-4030-ad8b-7a71095799ed tempest-ServerActionsTestOtherB-567149294 tempest-ServerActionsTestOtherB-567149294-project-member] Lock "72cdf2b2-fb69-4820-a663-56bfe92572d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.890s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.117764] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2003.118066] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e53f081c-9900-4e7e-9bfd-4ea6fd5dbf0d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.125420] env[62820]: DEBUG oslo_vmware.api [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 2003.125420] env[62820]: value = "task-1696720" [ 2003.125420] env[62820]: _type = "Task" [ 2003.125420] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.133022] env[62820]: DEBUG oslo_vmware.api [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.243862] env[62820]: DEBUG oslo_vmware.api [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Task: {'id': task-1696719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123977} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.243862] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2003.244184] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2003.244427] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2003.244626] env[62820]: INFO nova.compute.manager [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2003.244873] env[62820]: DEBUG oslo.service.loopingcall [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2003.245077] env[62820]: DEBUG nova.compute.manager [-] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2003.245184] env[62820]: DEBUG nova.network.neutron [-] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2003.608332] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cc7bd6-d8e3-4f6a-be85-a2b2d8374293 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.615881] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab89a7b-0742-4a68-8f37-82ef7592d4d3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.649853] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffb54ad-de37-4280-8af3-a836cd830a25 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.657285] env[62820]: DEBUG oslo_vmware.api [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696720, 'name': PowerOffVM_Task, 'duration_secs': 0.155875} completed successfully. 
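Once the guest is destroyed, network deallocation is wrapped in a looping call so that transient Neutron failures are retried rather than failing the whole teardown ("Waiting for function ... _deallocate_network_with_retries to return"). A minimal retry-loop sketch of that idea; the attempt count, delay, and function names are assumptions for illustration, not the oslo.service loopingcall machinery.

    import time

    def deallocate_with_retries(deallocate, attempts=3, delay=2.0):
        """Call deallocate(); on failure, wait and try again up to `attempts` times."""
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception as exc:     # real code would catch specific client errors
                if attempt == attempts:
                    raise
                print(f"deallocate_for_instance failed ({exc}); "
                      f"retry {attempt}/{attempts - 1} in {delay}s")
                time.sleep(delay)

    if __name__ == "__main__":
        calls = {"n": 0}

        def flaky_deallocate():
            calls["n"] += 1
            if calls["n"] < 2:
                raise ConnectionError("Neutron temporarily unreachable")
            print("deallocate_for_instance() succeeded")

        deallocate_with_retries(flaky_deallocate)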
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.659292] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2003.659564] env[62820]: DEBUG nova.compute.manager [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2003.660407] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce2bff2-cc00-4a7e-8d25-084cf2a12762 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.663711] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a4f506-2970-4767-8c88-1cd73ce63ab5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.680232] env[62820]: DEBUG nova.compute.provider_tree [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2003.729056] env[62820]: DEBUG nova.compute.manager [req-c7221e51-b98b-4959-9b03-71c16ad9c9a6 req-3d1bfb46-3c63-4d08-bd60-d926fa752236 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Received event network-vif-deleted-857882cd-2832-40d3-9537-cb5042e3808a {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2003.729270] env[62820]: INFO nova.compute.manager [req-c7221e51-b98b-4959-9b03-71c16ad9c9a6 req-3d1bfb46-3c63-4d08-bd60-d926fa752236 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Neutron deleted interface 857882cd-2832-40d3-9537-cb5042e3808a; detaching it from the instance and deleting it from the info cache [ 2003.729453] env[62820]: DEBUG nova.network.neutron [req-c7221e51-b98b-4959-9b03-71c16ad9c9a6 req-3d1bfb46-3c63-4d08-bd60-d926fa752236 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.187331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-9febf431-d9ec-4695-aa30-a5d54ce9ed40 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.090s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.205951] env[62820]: ERROR nova.scheduler.client.report [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [req-7a448c17-c8c7-4d0c-ad50-78cd01e97cc7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 8a0693d4-1456-4a04-ae15-b1eaea0edd7a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7a448c17-c8c7-4d0c-ad50-78cd01e97cc7"}]} [ 2004.208605] env[62820]: DEBUG nova.network.neutron [-] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.232183] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e7d80d1-2100-4e02-bd75-c6f5994fcbcc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.242288] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce93e7f4-69d6-4077-afe6-9322558f899a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.270645] env[62820]: DEBUG nova.compute.manager [req-c7221e51-b98b-4959-9b03-71c16ad9c9a6 req-3d1bfb46-3c63-4d08-bd60-d926fa752236 service nova] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Detach interface failed, port_id=857882cd-2832-40d3-9537-cb5042e3808a, reason: Instance c33b3040-b93e-43f7-ab00-e29e8a307d0b could not be found. 
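The ERROR above is Placement's optimistic concurrency control at work: the inventory PUT carried a stale resource provider generation, Placement answered 409 with code placement.concurrent_update, and the entries that follow show the client refreshing inventories, aggregates, and traits before retrying. A sketch of that refresh-and-retry loop against the Placement HTTP API; the base URL, token, and microversion header values are assumptions, while the endpoint path and payload shape follow the inventory data visible in the log.

    import requests

    PLACEMENT = "http://placement.example.test/placement"          # assumed endpoint
    HEADERS = {"X-Auth-Token": "<token>",                           # assumed credentials
               "OpenStack-API-Version": "placement 1.39"}

    def get_provider_generation(rp_uuid):
        resp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                            headers=HEADERS)
        resp.raise_for_status()
        return resp.json()["resource_provider_generation"]

    def set_inventory(rp_uuid, inventories, max_retries=3):
        """PUT the inventory; on a 409 generation conflict, refresh and retry."""
        for _ in range(max_retries):
            generation = get_provider_generation(rp_uuid)
            body = {"resource_provider_generation": generation,
                    "inventories": inventories}
            resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                                json=body, headers=HEADERS)
            if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
                continue          # another writer bumped the generation; refresh and retry
            resp.raise_for_status()
            return resp.json()
        raise RuntimeError("gave up after repeated generation conflicts")

    if __name__ == "__main__":
        inventories = {
            "VCPU": {"total": 48, "max_unit": 16, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
            "DISK_GB": {"total": 400, "max_unit": 182, "allocation_ratio": 1.0},
        }
        set_inventory("8a0693d4-1456-4a04-ae15-b1eaea0edd7a", inventories)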
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 2004.419673] env[62820]: DEBUG nova.scheduler.client.report [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Refreshing inventories for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2004.435807] env[62820]: DEBUG nova.scheduler.client.report [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updating ProviderTree inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2004.436058] env[62820]: DEBUG nova.compute.provider_tree [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.447903] env[62820]: DEBUG nova.scheduler.client.report [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Refreshing aggregate associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, aggregates: None {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2004.467072] env[62820]: DEBUG nova.scheduler.client.report [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Refreshing trait associations for resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62820) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2004.483267] env[62820]: DEBUG nova.objects.instance [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'flavor' on Instance uuid ab74220e-d9c4-4c96-a38d-9935dd3e13c0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2004.557664] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d27fb4-338a-4321-ac2a-08b17a702c77 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2004.565835] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4866c819-e1df-4fc6-b606-83ffc6852cf3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.595975] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e62ddc7-f379-42ae-96f1-f195b77da274 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.604244] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed33fa3-896a-41b9-87b1-af411f5df041 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.617213] env[62820]: DEBUG nova.compute.provider_tree [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.905558] env[62820]: INFO nova.compute.manager [-] [instance: c33b3040-b93e-43f7-ab00-e29e8a307d0b] Took 1.66 seconds to deallocate network for instance. [ 2004.989988] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.990208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.990371] env[62820]: DEBUG nova.network.neutron [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2004.990549] env[62820]: DEBUG nova.objects.instance [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'info_cache' on Instance uuid ab74220e-d9c4-4c96-a38d-9935dd3e13c0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2005.146007] env[62820]: DEBUG nova.scheduler.client.report [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 189 in Placement from set_inventory_for_provider using data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2005.146269] env[62820]: DEBUG nova.compute.provider_tree [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 189 to 190 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2005.146449] env[62820]: DEBUG nova.compute.provider_tree [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2005.412368] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.493473] env[62820]: DEBUG nova.objects.base [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62820) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2005.651784] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.137s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.652329] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Start building networks asynchronously for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2005.655076] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.243s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.655299] env[62820]: DEBUG nova.objects.instance [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lazy-loading 'resources' on Instance uuid c33b3040-b93e-43f7-ab00-e29e8a307d0b {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2006.158127] env[62820]: DEBUG nova.compute.utils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2006.163022] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Allocating IP information in the background. {{(pid=62820) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2006.163022] env[62820]: DEBUG nova.network.neutron [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] allocate_for_instance() {{(pid=62820) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2006.241311] env[62820]: DEBUG nova.policy [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '183f339671f54844bee09459976816ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f214ed24ef014d32bfaea02a7174b912', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62820) authorize /opt/stack/nova/nova/policy.py:192}} [ 2006.244362] env[62820]: DEBUG nova.network.neutron [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [{"id": "10550a85-a1ac-4990-b2e8-34972567d45b", "address": "fa:16:3e:2e:d9:60", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10550a85-a1", "ovs_interfaceid": "10550a85-a1ac-4990-b2e8-34972567d45b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.265669] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d252ca5-4a0b-453e-916c-e505fadc8ebe {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.276778] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f29606-d7a5-4086-8b02-71f9ff6d72da {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.313026] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e05134a-2263-42a1-99f3-9cdd89db6fe5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.321071] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c1c260-ae4c-4edf-8bfb-f7cd3b6e4f60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.338857] env[62820]: DEBUG nova.compute.provider_tree [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.540486] env[62820]: DEBUG nova.network.neutron [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Successfully created port: 256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2006.662623] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Start building block device mappings for instance. 
{{(pid=62820) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2006.747065] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.845177] env[62820]: DEBUG nova.scheduler.client.report [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2007.350111] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.373272] env[62820]: INFO nova.scheduler.client.report [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Deleted allocations for instance c33b3040-b93e-43f7-ab00-e29e8a307d0b [ 2007.677027] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Start spawning the instance on the hypervisor. 
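The instance_info_cache payloads logged above are plain nested dicts: each VIF carries its port id, MAC address, the network with its subnets, fixed IPs and any floating IPs, plus the binding details (tap device name, segmentation id, and so on). A small sketch that pulls the commonly needed fields out of one such entry; the summarize_vif helper is purely illustrative, and the sample data is trimmed from the cache entry logged above for instance ab74220e-d9c4-4c96-a38d-9935dd3e13c0.

    # Trimmed from the instance_info_cache entry in the log.
    vif = {
        "id": "10550a85-a1ac-4990-b2e8-34972567d45b",
        "address": "fa:16:3e:2e:d9:60",
        "devname": "tap10550a85-a1",
        "type": "ovs",
        "details": {"segmentation_id": 879},
        "network": {
            "label": "tempest-ServerActionsTestJSON-1589765870-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.3",
                         "floating_ips": [{"address": "10.180.180.248"}]}],
            }],
        },
    }

    def summarize_vif(vif):
        """Flatten one network_info VIF entry into the fields operators usually want."""
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return {"port_id": vif["id"], "mac": vif["address"],
                "device": vif["devname"], "vif_type": vif["type"],
                "fixed_ips": fixed, "floating_ips": floating}

    if __name__ == "__main__":
        print(summarize_vif(vif))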
{{(pid=62820) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2007.702954] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T16:35:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T16:35:10Z,direct_url=,disk_format='vmdk',id=b17619ac-779a-4463-ab94-4bb0b9ba63c1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22945fad30bb46e69a75536b22c2f833',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T16:35:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2007.703265] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Flavor limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2007.703428] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Image limits 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2007.703736] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Flavor pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2007.703901] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Image pref 0:0:0 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2007.704068] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62820) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2007.704288] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2007.704449] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2007.704619] env[62820]: DEBUG nova.virt.hardware [None 
req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Got 1 possible topologies {{(pid=62820) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2007.704781] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2007.704950] env[62820]: DEBUG nova.virt.hardware [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62820) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2007.705814] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5312089b-ed2e-4e9c-b13e-1a6e1aec20a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.715105] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27211a5d-b1e7-465e-8e72-78c444e7c5b1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.753757] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2007.754037] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a934e01-b973-4a95-9024-af88f7714d79 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.822295] env[62820]: DEBUG oslo_vmware.api [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 2007.822295] env[62820]: value = "task-1696722" [ 2007.822295] env[62820]: _type = "Task" [ 2007.822295] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.830781] env[62820]: DEBUG oslo_vmware.api [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696722, 'name': PowerOnVM_Task} progress is 0%. 
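The nova.virt.hardware entries walk through CPU topology selection for the m1.nano flavor: with no flavor or image limits the maximums default to 65536 sockets, cores, and threads, the single vCPU yields exactly one possible topology, and (sockets=1, cores=1, threads=1) is chosen. A standalone sketch of that enumeration; the function name and the simple "prefer more sockets" ordering are illustrative assumptions, not Nova's exact algorithm.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus
        and which respect the per-dimension limits."""
        topologies = []
        for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                               range(1, min(vcpus, max_cores) + 1),
                                               range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    if __name__ == "__main__":
        topos = possible_topologies(1)
        print("Possible topologies:", topos)       # [(1, 1, 1)] for a 1 vCPU flavor
        # Pick a preferred ordering, e.g. spread across sockets first.
        best = sorted(topos, key=lambda t: (-t[0], -t[1]))[0]
        print("Chosen topology: sockets=%d cores=%d threads=%d" % best)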
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.879864] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c6f8f864-741d-4f3d-b0d6-a87309930611 tempest-AttachVolumeNegativeTest-1173002150 tempest-AttachVolumeNegativeTest-1173002150-project-member] Lock "c33b3040-b93e-43f7-ab00-e29e8a307d0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.278s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.965325] env[62820]: DEBUG nova.compute.manager [req-19a90586-561d-4d71-8ec5-2757352f5ece req-9cbc2acb-5301-43b8-a759-d7e1f7933ab3 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Received event network-vif-plugged-256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2007.965553] env[62820]: DEBUG oslo_concurrency.lockutils [req-19a90586-561d-4d71-8ec5-2757352f5ece req-9cbc2acb-5301-43b8-a759-d7e1f7933ab3 service nova] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.965795] env[62820]: DEBUG oslo_concurrency.lockutils [req-19a90586-561d-4d71-8ec5-2757352f5ece req-9cbc2acb-5301-43b8-a759-d7e1f7933ab3 service nova] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.965965] env[62820]: DEBUG oslo_concurrency.lockutils [req-19a90586-561d-4d71-8ec5-2757352f5ece req-9cbc2acb-5301-43b8-a759-d7e1f7933ab3 service nova] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.967058] env[62820]: DEBUG nova.compute.manager [req-19a90586-561d-4d71-8ec5-2757352f5ece req-9cbc2acb-5301-43b8-a759-d7e1f7933ab3 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] No waiting events found dispatching network-vif-plugged-256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2007.967058] env[62820]: WARNING nova.compute.manager [req-19a90586-561d-4d71-8ec5-2757352f5ece req-9cbc2acb-5301-43b8-a759-d7e1f7933ab3 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Received unexpected event network-vif-plugged-256b7e4b-55e2-4fef-9850-258ee7dc7bc1 for instance with vm_state building and task_state spawning. [ 2008.051694] env[62820]: DEBUG nova.network.neutron [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Successfully updated port: 256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2008.332955] env[62820]: DEBUG oslo_vmware.api [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696722, 'name': PowerOnVM_Task} progress is 66%. 
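The network-vif-plugged handling above shows the compute manager's event rendezvous: when an external Neutron event arrives, a matching registered waiter is popped and woken, and if nothing was waiting the "Received unexpected event" warning seen here is logged (the instance is still building, so no waiter existed yet). A compact sketch of that register-and-pop pattern using threading primitives; the class and method names are illustrative, not the InstanceEvents implementation.

    import threading

    class EventWaiters:
        """Track expected external events per instance and wake waiters on arrival."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}          # (instance_uuid, event_name) -> threading.Event

        def expect(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def deliver(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"WARNING: received unexpected event {event_name} "
                      f"for instance {instance_uuid}")
                return False
            ev.set()
            return True

    if __name__ == "__main__":
        waiters = EventWaiters()
        uuid = "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418"
        event = "network-vif-plugged-256b7e4b-55e2-4fef-9850-258ee7dc7bc1"
        waiters.deliver(uuid, event)            # nothing registered -> warning, like the log
        ev = waiters.expect(uuid, event)        # register first, then deliver -> waiter woken
        waiters.deliver(uuid, event)
        print("waiter signalled:", ev.is_set())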
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.553292] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.553617] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.553667] env[62820]: DEBUG nova.network.neutron [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2008.834358] env[62820]: DEBUG oslo_vmware.api [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696722, 'name': PowerOnVM_Task, 'duration_secs': 0.636516} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.834619] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2008.834819] env[62820]: DEBUG nova.compute.manager [None req-ab20c5cd-6f78-4d92-8f72-64839750bd65 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2008.835570] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8998dd4c-ed85-4b7a-8fb6-475d0ed956c5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.091249] env[62820]: DEBUG nova.network.neutron [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Instance cache missing network info. 
{{(pid=62820) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2009.224340] env[62820]: DEBUG nova.network.neutron [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating instance_info_cache with network_info: [{"id": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "address": "fa:16:3e:5d:3e:9e", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap256b7e4b-55", "ovs_interfaceid": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.727208] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.727496] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Instance network_info: |[{"id": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "address": "fa:16:3e:5d:3e:9e", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap256b7e4b-55", "ovs_interfaceid": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62820) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2009.728878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:3e:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ff3ecd2f-0b10-4faf-a512-fd7a20c28df1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '256b7e4b-55e2-4fef-9850-258ee7dc7bc1', 'vif_model': 'vmxnet3'}] {{(pid=62820) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2009.735240] env[62820]: DEBUG oslo.service.loopingcall [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2009.735470] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Creating VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2009.735720] env[62820]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-724b3831-d2c3-4e7c-8018-ac407e80fc85 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.755265] env[62820]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2009.755265] env[62820]: value = "task-1696724" [ 2009.755265] env[62820]: _type = "Task" [ 2009.755265] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.762860] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696724, 'name': CreateVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.999008] env[62820]: DEBUG nova.compute.manager [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Received event network-changed-256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2009.999240] env[62820]: DEBUG nova.compute.manager [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Refreshing instance network info cache due to event network-changed-256b7e4b-55e2-4fef-9850-258ee7dc7bc1. 
{{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 2009.999443] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] Acquiring lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.999585] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] Acquired lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.999746] env[62820]: DEBUG nova.network.neutron [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Refreshing network info cache for port 256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2010.266098] env[62820]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696724, 'name': CreateVM_Task, 'duration_secs': 0.369808} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.266277] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Created VM on the ESX host {{(pid=62820) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2010.266951] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.267132] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.267504] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2010.267760] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417c1e75-0c79-4594-aeca-558422964ad7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.272584] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2010.272584] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]52e96e47-9b4e-b891-65e5-03c9f6ca807f" [ 2010.272584] env[62820]: _type = "Task" [ 2010.272584] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.280109] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e96e47-9b4e-b891-65e5-03c9f6ca807f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.632766] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ee91e4-9ed6-4b64-9a86-28a95c53047c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.639585] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Suspending the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2010.639807] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d9452b7f-8804-4fd0-8d94-32b765d68136 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.646508] env[62820]: DEBUG oslo_vmware.api [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 2010.646508] env[62820]: value = "task-1696725" [ 2010.646508] env[62820]: _type = "Task" [ 2010.646508] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.654708] env[62820]: DEBUG oslo_vmware.api [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696725, 'name': SuspendVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.685849] env[62820]: DEBUG nova.network.neutron [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updated VIF entry in instance network info cache for port 256b7e4b-55e2-4fef-9850-258ee7dc7bc1. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2010.686221] env[62820]: DEBUG nova.network.neutron [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating instance_info_cache with network_info: [{"id": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "address": "fa:16:3e:5d:3e:9e", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap256b7e4b-55", "ovs_interfaceid": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2010.784276] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]52e96e47-9b4e-b891-65e5-03c9f6ca807f, 'name': SearchDatastore_Task, 'duration_secs': 0.011625} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.784707] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.784817] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Processing image b17619ac-779a-4463-ab94-4bb0b9ba63c1 {{(pid=62820) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2010.785056] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.785223] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.785379] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2010.785657] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1719f730-34ec-4ec7-9252-8805b8524ea1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.794641] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62820) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2010.794896] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62820) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2010.795802] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feaf463a-1f7a-45fc-8a20-8df12d738b31 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.802069] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2010.802069] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]525f590a-d76e-da80-ddbd-8a5b19df6a5e" [ 2010.802069] env[62820]: _type = "Task" [ 2010.802069] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.811209] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525f590a-d76e-da80-ddbd-8a5b19df6a5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.156078] env[62820]: DEBUG oslo_vmware.api [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696725, 'name': SuspendVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.189279] env[62820]: DEBUG oslo_concurrency.lockutils [req-2d3e7f13-2607-4ea7-9ba4-6096eb0c3c6a req-2b7e4888-57a8-4bc7-8941-2be4c033be61 service nova] Releasing lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.311382] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]525f590a-d76e-da80-ddbd-8a5b19df6a5e, 'name': SearchDatastore_Task, 'duration_secs': 0.013394} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.312195] env[62820]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a48c50b-4a62-4abc-bef5-da392b95d4e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.318455] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2011.318455] env[62820]: value = "session[5263da33-e147-45e9-71e6-fd449b37f057]527d9442-e7d9-0233-7c58-429daa81425d" [ 2011.318455] env[62820]: _type = "Task" [ 2011.318455] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.326155] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527d9442-e7d9-0233-7c58-429daa81425d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.656549] env[62820]: DEBUG oslo_vmware.api [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696725, 'name': SuspendVM_Task, 'duration_secs': 0.607662} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.656878] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Suspended the VM {{(pid=62820) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2011.657117] env[62820]: DEBUG nova.compute.manager [None req-5c24aeed-fe75-46d9-b75e-7843a586b7c8 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2011.657887] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507c0a57-34e5-4543-8ff7-0c00c2082df6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.829292] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': session[5263da33-e147-45e9-71e6-fd449b37f057]527d9442-e7d9-0233-7c58-429daa81425d, 'name': SearchDatastore_Task, 'duration_secs': 0.012578} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.829671] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.829778] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418/8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2011.830036] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d4af58e-97a1-4366-815b-a05d82abf670 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.836902] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2011.836902] env[62820]: value = "task-1696727" [ 2011.836902] env[62820]: _type = "Task" [ 2011.836902] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.843971] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696727, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.348235] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442929} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.348235] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b17619ac-779a-4463-ab94-4bb0b9ba63c1/b17619ac-779a-4463-ab94-4bb0b9ba63c1.vmdk to [datastore1] 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418/8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418.vmdk {{(pid=62820) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2012.348235] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Extending root virtual disk to 1048576 {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2012.348235] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5ee0eed-d9bc-463c-9c73-5916bc9d88ac {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.354292] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2012.354292] env[62820]: value = "task-1696728" [ 2012.354292] env[62820]: _type = "Task" [ 2012.354292] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.362253] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.864018] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081459} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.864358] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Extended root virtual disk {{(pid=62820) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2012.864971] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7f66b6-8a79-42f3-85e0-6d77e8b15f66 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.886038] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418/8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2012.886261] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-750f17fa-f804-4374-a916-4eb325e6a149 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.904972] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2012.904972] env[62820]: value = "task-1696729" [ 2012.904972] env[62820]: _type = "Task" [ 2012.904972] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.907819] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "09d7f053-df0e-428a-98a4-a18d70c0158e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.908085] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.908348] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.908516] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.908682] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.910990] env[62820]: INFO nova.compute.manager [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Terminating instance [ 2012.915196] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696729, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.992515] env[62820]: INFO nova.compute.manager [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Resuming [ 2012.993138] env[62820]: DEBUG nova.objects.instance [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'flavor' on Instance uuid ab74220e-d9c4-4c96-a38d-9935dd3e13c0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2013.416042] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696729, 'name': ReconfigVM_Task, 'duration_secs': 0.257897} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.416042] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418/8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418.vmdk or device None with type sparse {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2013.416517] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-664cd803-6c8e-4137-bc8e-20304727a7e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.418528] env[62820]: DEBUG nova.compute.manager [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2013.418735] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2013.418956] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6b44b53-1191-4ed7-88f4-b6d34ae48eb2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.425285] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2013.425285] env[62820]: value = "task-1696730" [ 2013.425285] env[62820]: _type = "Task" [ 2013.425285] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.426354] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2013.426354] env[62820]: value = "task-1696731" [ 2013.426354] env[62820]: _type = "Task" [ 2013.426354] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.436556] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696730, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.439495] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696731, 'name': Rename_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.940183] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696731, 'name': Rename_Task, 'duration_secs': 0.151037} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.943210] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Powering on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2013.943496] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696730, 'name': PowerOffVM_Task, 'duration_secs': 0.172014} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.943687] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95e4b6ca-d691-46a5-858e-04b5106f01e9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.945233] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2013.945443] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2013.945648] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353706', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'name': 'volume-02dc1a68-bb21-4b33-8d03-0d369092773d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '09d7f053-df0e-428a-98a4-a18d70c0158e', 'attached_at': '2024-12-10T16:59:12.000000', 'detached_at': '', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'serial': '02dc1a68-bb21-4b33-8d03-0d369092773d'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2013.946392] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a1d2df-28b8-434c-a7d4-000e6469bfd7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.966478] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c257003a-3acf-4195-910c-30a9d4872df4 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.968889] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2013.968889] env[62820]: value = "task-1696732" [ 2013.968889] env[62820]: _type = "Task" [ 2013.968889] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.974333] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63bf7677-a314-4368-8123-fb9b9d3a89ca {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.979210] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696732, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.994210] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844aef91-8d8f-43a5-bff8-bc44d1311143 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.009697] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] The volume has not been displaced from its original location: [datastore1] volume-02dc1a68-bb21-4b33-8d03-0d369092773d/volume-02dc1a68-bb21-4b33-8d03-0d369092773d.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2014.014890] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2014.015390] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b130bf23-564e-4c1f-974b-6434ae275788 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.033023] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2014.033023] env[62820]: value = "task-1696733" [ 2014.033023] env[62820]: _type = "Task" [ 2014.033023] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.040879] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696733, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.479382] env[62820]: DEBUG oslo_vmware.api [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696732, 'name': PowerOnVM_Task, 'duration_secs': 0.438898} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.479650] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Powered on the VM {{(pid=62820) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2014.479851] env[62820]: INFO nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Took 6.80 seconds to spawn the instance on the hypervisor. 
[ 2014.480040] env[62820]: DEBUG nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2014.480942] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62de5722-0426-4d95-bd83-417c8c8df7cc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.516352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.517088] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquired lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.517088] env[62820]: DEBUG nova.network.neutron [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Building network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2014.542130] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696733, 'name': ReconfigVM_Task, 'duration_secs': 0.183242} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.542703] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2014.547427] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f60387a3-e564-4bde-aae3-cb99668325bb {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.562386] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2014.562386] env[62820]: value = "task-1696735" [ 2014.562386] env[62820]: _type = "Task" [ 2014.562386] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.569959] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696735, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.997835] env[62820]: INFO nova.compute.manager [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Took 12.90 seconds to build instance. [ 2015.073364] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696735, 'name': ReconfigVM_Task, 'duration_secs': 0.142279} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.073672] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353706', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'name': 'volume-02dc1a68-bb21-4b33-8d03-0d369092773d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '09d7f053-df0e-428a-98a4-a18d70c0158e', 'attached_at': '2024-12-10T16:59:12.000000', 'detached_at': '', 'volume_id': '02dc1a68-bb21-4b33-8d03-0d369092773d', 'serial': '02dc1a68-bb21-4b33-8d03-0d369092773d'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2015.073953] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2015.074721] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450414ed-c21a-4a72-8c94-f3359fb83a83 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.081164] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2015.081388] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26c000ae-c9b7-4e69-852e-a548619d452a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.160213] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2015.160475] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Deleting contents of the 
VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2015.160679] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] 09d7f053-df0e-428a-98a4-a18d70c0158e {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2015.160936] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bed83f0-9b83-474e-8def-d19f32795962 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.169476] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2015.169476] env[62820]: value = "task-1696737" [ 2015.169476] env[62820]: _type = "Task" [ 2015.169476] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.176691] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696737, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.381781] env[62820]: DEBUG nova.network.neutron [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [{"id": "10550a85-a1ac-4990-b2e8-34972567d45b", "address": "fa:16:3e:2e:d9:60", "network": {"id": "e9dbd812-73d8-4765-97f0-15bfc7562a0c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1589765870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57b0c64a8704e7aaeba4011866c7a24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10550a85-a1", "ovs_interfaceid": "10550a85-a1ac-4990-b2e8-34972567d45b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.478092] env[62820]: DEBUG nova.compute.manager [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Received event network-changed-256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11489}} [ 2015.478092] env[62820]: DEBUG nova.compute.manager [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Refreshing instance network info cache due to event network-changed-256b7e4b-55e2-4fef-9850-258ee7dc7bc1. {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11494}} [ 2015.478245] env[62820]: DEBUG oslo_concurrency.lockutils [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] Acquiring lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.478414] env[62820]: DEBUG oslo_concurrency.lockutils [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] Acquired lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.478614] env[62820]: DEBUG nova.network.neutron [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Refreshing network info cache for port 256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2015.499671] env[62820]: DEBUG oslo_concurrency.lockutils [None req-e7919268-021b-4c16-8dfc-561c198a70c8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.413s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.679299] env[62820]: DEBUG oslo_vmware.api [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100424} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.679562] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2015.679746] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2015.679920] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2015.680108] env[62820]: INFO nova.compute.manager [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Took 2.26 seconds to destroy the instance on the hypervisor. [ 2015.680403] env[62820]: DEBUG oslo.service.loopingcall [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2015.680601] env[62820]: DEBUG nova.compute.manager [-] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2015.680698] env[62820]: DEBUG nova.network.neutron [-] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2015.890275] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Releasing lock "refresh_cache-ab74220e-d9c4-4c96-a38d-9935dd3e13c0" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.890275] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cb92b2-3bd6-4224-8501-d511e9f8ca64 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.897064] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Resuming the VM {{(pid=62820) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2015.897307] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea0eabd6-1dff-42ed-9dc3-b3fabb04749c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.903175] env[62820]: DEBUG oslo_vmware.api [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 2015.903175] env[62820]: value = "task-1696738" [ 2015.903175] env[62820]: _type = "Task" [ 2015.903175] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.916995] env[62820]: DEBUG oslo_vmware.api [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.331649] env[62820]: DEBUG nova.network.neutron [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updated VIF entry in instance network info cache for port 256b7e4b-55e2-4fef-9850-258ee7dc7bc1. 
{{(pid=62820) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2016.332087] env[62820]: DEBUG nova.network.neutron [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating instance_info_cache with network_info: [{"id": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "address": "fa:16:3e:5d:3e:9e", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap256b7e4b-55", "ovs_interfaceid": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.415425] env[62820]: DEBUG oslo_vmware.api [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696738, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.774094] env[62820]: DEBUG nova.network.neutron [-] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.834408] env[62820]: DEBUG oslo_concurrency.lockutils [req-2aab1fc0-d38a-4e2d-bdda-5375fc60c420 req-3412235d-453b-4825-ba40-4c132e9db58d service nova] Releasing lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.915031] env[62820]: DEBUG oslo_vmware.api [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696738, 'name': PowerOnVM_Task, 'duration_secs': 0.564897} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.915309] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Resumed the VM {{(pid=62820) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2016.915550] env[62820]: DEBUG nova.compute.manager [None req-0abe8b76-0c07-445d-b454-d6023838d5b5 tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Checking state {{(pid=62820) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2016.916501] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2113dc-4142-41fe-8050-385db91aae3e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.275434] env[62820]: INFO nova.compute.manager [-] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Took 1.59 seconds to deallocate network for instance. [ 2017.502885] env[62820]: DEBUG nova.compute.manager [req-2dbe052a-9a43-48f9-b200-8fd840d2758d req-b51badb4-bbbc-4e64-9c5e-7bfb40e9b95f service nova] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Received event network-vif-deleted-32f96b87-6a60-4c4f-877b-3ab110787004 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2017.750809] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.751097] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.751312] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.751578] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.751827] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 
tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.753941] env[62820]: INFO nova.compute.manager [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Terminating instance [ 2017.818038] env[62820]: INFO nova.compute.manager [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Took 0.54 seconds to detach 1 volumes for instance. [ 2017.820143] env[62820]: DEBUG nova.compute.manager [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 09d7f053-df0e-428a-98a4-a18d70c0158e] Deleting volume: 02dc1a68-bb21-4b33-8d03-0d369092773d {{(pid=62820) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2018.257872] env[62820]: DEBUG nova.compute.manager [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2018.258165] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2018.259121] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da39910-f1d2-4eca-8494-5bf484af4392 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.266877] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2018.267126] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a25bca19-77eb-4a0e-94ce-58bd3bbb4101 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.272313] env[62820]: DEBUG oslo_vmware.api [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 2018.272313] env[62820]: value = "task-1696740" [ 2018.272313] env[62820]: _type = "Task" [ 2018.272313] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.279906] env[62820]: DEBUG oslo_vmware.api [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.360254] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.360551] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.360748] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.387962] env[62820]: INFO nova.scheduler.client.report [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted allocations for instance 09d7f053-df0e-428a-98a4-a18d70c0158e [ 2018.783500] env[62820]: DEBUG oslo_vmware.api [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696740, 'name': PowerOffVM_Task, 'duration_secs': 0.170745} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.783500] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2018.783500] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2018.783500] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0abbc5d1-9114-4f3a-acda-5aa2cc264b4b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.863131] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2018.863348] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2018.863534] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleting the datastore file [datastore1] ab74220e-d9c4-4c96-a38d-9935dd3e13c0 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.863792] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3ccec39-6520-41e1-9cf2-d5a814f0275a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.870573] env[62820]: DEBUG oslo_vmware.api [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for the task: (returnval){ [ 2018.870573] env[62820]: value = "task-1696742" [ 2018.870573] env[62820]: _type = "Task" [ 2018.870573] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.877929] env[62820]: DEBUG oslo_vmware.api [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696742, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.900259] env[62820]: DEBUG oslo_concurrency.lockutils [None req-1d754a1f-694e-44da-a976-ed88a73a1aeb tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "09d7f053-df0e-428a-98a4-a18d70c0158e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.992s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.368445] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.368699] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.368853] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 2019.368972] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Rebuilding the list of instances to heal {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10290}} [ 2019.380992] env[62820]: DEBUG oslo_vmware.api [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Task: {'id': task-1696742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145747} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.380992] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2019.381176] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2019.381308] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2019.381513] env[62820]: INFO nova.compute.manager [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Took 1.12 seconds to destroy the instance on the hypervisor. 
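[editor's note] The oslo_concurrency.lockutils records above ("acquired ... waited Ns", ""released" ... held Ns") carry structured timing data that is useful when reading a terminate_instance trace like this one. The following is a hypothetical stand-alone helper (not part of Nova or oslo.concurrency) that extracts those durations from a log file in exactly the format shown here; the function and variable names are illustrative only.

# lock_stats.py - summarise lockutils wait/held times from a Nova log excerpt.
import re
import sys
from collections import defaultdict

ACQUIRED = re.compile(
    r'Lock "(?P<name>[^"]+)" acquired by "(?P<owner>[^"]+)" :: waited (?P<waited>[\d.]+)s')
RELEASED = re.compile(
    r'Lock "(?P<name>[^"]+)" "released" by "(?P<owner>[^"]+)" :: held (?P<held>[\d.]+)s')

def lock_stats(lines):
    """Return {lock_name: {'count': n, 'waited': total_s, 'held': total_s}}."""
    stats = defaultdict(lambda: {"count": 0, "waited": 0.0, "held": 0.0})
    for line in lines:
        m = ACQUIRED.search(line)
        if m:
            stats[m["name"]]["count"] += 1
            stats[m["name"]]["waited"] += float(m["waited"])
        m = RELEASED.search(line)
        if m:
            stats[m["name"]]["held"] += float(m["held"])
    return dict(stats)

if __name__ == "__main__":
    with open(sys.argv[1]) as f:
        results = lock_stats(f)
    for name, s in sorted(results.items(), key=lambda kv: kv[1]["held"], reverse=True):
        print(f'{name}: acquired {s["count"]}x, waited {s["waited"]:.3f}s, held {s["held"]:.3f}s')

Run against this excerpt it would report, for example, that "compute_resources" is acquired repeatedly by the resource tracker while the "ab74220e-...-events" lock is held for effectively 0.000s, matching the records above.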
[ 2019.381827] env[62820]: DEBUG oslo.service.loopingcall [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.382042] env[62820]: DEBUG nova.compute.manager [-] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2019.382152] env[62820]: DEBUG nova.network.neutron [-] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2019.745939] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "7a923678-5eea-4149-9a6d-0594fdb532c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.746226] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.746436] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "7a923678-5eea-4149-9a6d-0594fdb532c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.746618] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.746784] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.748860] env[62820]: INFO nova.compute.manager [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Terminating instance [ 2019.803429] env[62820]: DEBUG nova.compute.manager 
[req-38d0d5e9-6716-434d-81e7-3b7eb5fb4192 req-62128763-bbf5-4218-9d58-923095650ab9 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Received event network-vif-deleted-10550a85-a1ac-4990-b2e8-34972567d45b {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2019.803429] env[62820]: INFO nova.compute.manager [req-38d0d5e9-6716-434d-81e7-3b7eb5fb4192 req-62128763-bbf5-4218-9d58-923095650ab9 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Neutron deleted interface 10550a85-a1ac-4990-b2e8-34972567d45b; detaching it from the instance and deleting it from the info cache [ 2019.803777] env[62820]: DEBUG nova.network.neutron [req-38d0d5e9-6716-434d-81e7-3b7eb5fb4192 req-62128763-bbf5-4218-9d58-923095650ab9 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.542146] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Skipping network cache update for instance because it is being deleted. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10303}} [ 2020.542355] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Skipping network cache update for instance because it is being deleted. {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10303}} [ 2020.544691] env[62820]: DEBUG nova.compute.manager [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2020.544891] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2020.545170] env[62820]: DEBUG nova.network.neutron [-] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.550025] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d992976-a5a4-49f2-a4b3-55a51d6ddafc {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.550025] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69c96f14-a7df-48f3-a946-42ac6833a0c7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.557205] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2020.558467] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88f6603d-01e2-43bc-a29d-5ff1f1277153 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.562378] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a120f592-0d39-4fa9-9a1a-c650d173cb2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.578255] env[62820]: DEBUG oslo_vmware.api [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2020.578255] env[62820]: value = "task-1696743" [ 2020.578255] env[62820]: _type = "Task" [ 2020.578255] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.578932] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.579073] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2020.579215] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2020.579362] env[62820]: DEBUG nova.objects.instance [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lazy-loading 'info_cache' on Instance uuid 15b6eda1-db87-45d1-a0c6-320386b02e12 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2020.588973] env[62820]: DEBUG nova.compute.manager [req-38d0d5e9-6716-434d-81e7-3b7eb5fb4192 req-62128763-bbf5-4218-9d58-923095650ab9 service nova] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Detach interface failed, port_id=10550a85-a1ac-4990-b2e8-34972567d45b, reason: Instance ab74220e-d9c4-4c96-a38d-9935dd3e13c0 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 2020.595053] env[62820]: DEBUG oslo_vmware.api [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.052562] env[62820]: INFO nova.compute.manager [-] [instance: ab74220e-d9c4-4c96-a38d-9935dd3e13c0] Took 1.67 seconds to deallocate network for instance. [ 2021.091942] env[62820]: DEBUG oslo_vmware.api [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696743, 'name': PowerOffVM_Task, 'duration_secs': 0.205917} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.092375] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2021.092549] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2021.092795] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6ef2b4a-05c6-4939-877c-c78cbc9c8034 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.165308] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2021.165565] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2021.165713] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] 7a923678-5eea-4149-9a6d-0594fdb532c8 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2021.166250] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d5d79ac-0710-415f-9369-61d3707039ee {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.172901] env[62820]: DEBUG oslo_vmware.api [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2021.172901] env[62820]: value = "task-1696745" [ 2021.172901] env[62820]: _type = "Task" [ 2021.172901] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.180336] env[62820]: DEBUG oslo_vmware.api [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696745, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.559355] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.559683] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.559910] env[62820]: DEBUG nova.objects.instance [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lazy-loading 'resources' on Instance uuid ab74220e-d9c4-4c96-a38d-9935dd3e13c0 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2021.682761] env[62820]: DEBUG oslo_vmware.api [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119232} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.683087] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2021.683282] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2021.683472] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2021.683641] env[62820]: INFO nova.compute.manager [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2021.683872] env[62820]: DEBUG oslo.service.loopingcall [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2021.684070] env[62820]: DEBUG nova.compute.manager [-] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2021.684167] env[62820]: DEBUG nova.network.neutron [-] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2021.902730] env[62820]: DEBUG nova.compute.manager [req-455c6d4b-d676-4ca0-ba4f-ed47981d04d1 req-19436a2c-5c17-444a-978e-89ec387f34b3 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Received event network-vif-deleted-56792423-7b5c-472d-8d0c-85c04d5dfe61 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2021.902897] env[62820]: INFO nova.compute.manager [req-455c6d4b-d676-4ca0-ba4f-ed47981d04d1 req-19436a2c-5c17-444a-978e-89ec387f34b3 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Neutron deleted interface 56792423-7b5c-472d-8d0c-85c04d5dfe61; detaching it from the instance and deleting it from the info cache [ 2021.903081] env[62820]: DEBUG nova.network.neutron [req-455c6d4b-d676-4ca0-ba4f-ed47981d04d1 req-19436a2c-5c17-444a-978e-89ec387f34b3 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.129774] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfd2fe7-d4d5-4750-8652-c4ccd9ca10e8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.137758] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1beb2b-3e94-4d72-a134-8c977b635fbd {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.168514] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d116a12a-0e6a-432b-9f40-29921c7e1908 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.177572] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864bc80d-57a7-432a-918a-2eb967ada583 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.190537] env[62820]: DEBUG nova.compute.provider_tree [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2022.307996] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 
15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [{"id": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "address": "fa:16:3e:03:5b:10", "network": {"id": "44e72171-e35e-44fa-b60d-da9397f00c88", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1149687139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14768f5b38ea4f6abf5583ce5e4409f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6e11a8-08", "ovs_interfaceid": "4b6e11a8-0891-4efe-bc15-3803f5edc4c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.380963] env[62820]: DEBUG nova.network.neutron [-] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.406400] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03d356b5-6e25-45e7-8782-2de60568ac50 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.416636] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c986ad-658d-477b-beb8-cde1d8b7f93d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.443553] env[62820]: DEBUG nova.compute.manager [req-455c6d4b-d676-4ca0-ba4f-ed47981d04d1 req-19436a2c-5c17-444a-978e-89ec387f34b3 service nova] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Detach interface failed, port_id=56792423-7b5c-472d-8d0c-85c04d5dfe61, reason: Instance 7a923678-5eea-4149-9a6d-0594fdb532c8 could not be found. 
{{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 2022.723398] env[62820]: DEBUG nova.scheduler.client.report [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 190 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2022.723657] env[62820]: DEBUG nova.compute.provider_tree [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 190 to 191 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2022.723932] env[62820]: DEBUG nova.compute.provider_tree [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2022.810840] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-15b6eda1-db87-45d1-a0c6-320386b02e12" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.811792] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 2022.811792] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.811792] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.811792] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.811792] env[62820]: 
DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.811792] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.812107] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.812107] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 2022.812208] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.883764] env[62820]: INFO nova.compute.manager [-] [instance: 7a923678-5eea-4149-9a6d-0594fdb532c8] Took 1.20 seconds to deallocate network for instance. [ 2023.229566] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.670s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.251824] env[62820]: INFO nova.scheduler.client.report [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Deleted allocations for instance ab74220e-d9c4-4c96-a38d-9935dd3e13c0 [ 2023.315347] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.315584] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.315734] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.315941] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2023.316835] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bb41d5-5e81-489d-a3cd-845e606e0500 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.325228] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d256e36d-334c-4965-a277-10b45fe6ed7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.339098] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9df7410-3d09-4a9b-a7f1-770f4d5c919b {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.345512] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46011d74-8ee6-48ee-8ac4-ea5ca5e2372f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.373612] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180226MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2023.373795] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.373953] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.389798] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.761393] env[62820]: DEBUG oslo_concurrency.lockutils [None req-2fd76869-1674-45ad-95ed-bc4c4adb928c tempest-ServerActionsTestJSON-843758358 tempest-ServerActionsTestJSON-843758358-project-member] Lock "ab74220e-d9c4-4c96-a38d-9935dd3e13c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.010s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.401249] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.401577] env[62820]: WARNING nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 7a923678-5eea-4149-9a6d-0594fdb532c8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2024.401577] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.401719] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2024.401857] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2024.450052] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3152d708-a816-4a61-964a-722c9667800c {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.463617] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8aa875d-25df-4e30-a21b-eb684e15fd4a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.492127] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892b530a-55a5-4a1f-8fd0-6f31d206397d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.498872] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112e8d0a-c037-46d6-804c-ac0310bf385e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.511399] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2025.014146] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2025.520025] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2025.520025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.145s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.520025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.130s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.520025] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.543798] env[62820]: INFO nova.scheduler.client.report [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted allocations for instance 7a923678-5eea-4149-9a6d-0594fdb532c8 [ 2026.052630] env[62820]: DEBUG oslo_concurrency.lockutils [None req-d0f51ea9-10fe-4f9a-a31c-652f96934962 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "7a923678-5eea-4149-9a6d-0594fdb532c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.306s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.501352] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "15b6eda1-db87-45d1-a0c6-320386b02e12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.501794] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.501905] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "15b6eda1-db87-45d1-a0c6-320386b02e12-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.502114] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.502291] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.504677] env[62820]: INFO nova.compute.manager [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Terminating instance [ 2030.008540] env[62820]: DEBUG nova.compute.manager [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Start destroying the instance on the hypervisor. {{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2030.008806] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2030.009744] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d75f903-f77b-4526-9725-db1de7449fb7 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.017440] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2030.017686] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df3a43ae-75a3-448f-89dd-cb5c6d01c834 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.023392] env[62820]: DEBUG oslo_vmware.api [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2030.023392] env[62820]: value = "task-1696747" [ 2030.023392] env[62820]: _type = "Task" [ 2030.023392] env[62820]: } to complete. 
{{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.031305] env[62820]: DEBUG oslo_vmware.api [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.533348] env[62820]: DEBUG oslo_vmware.api [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696747, 'name': PowerOffVM_Task, 'duration_secs': 0.197945} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.533754] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2030.533754] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2030.534011] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09f97f15-e5cc-4093-ae5d-27f13c68c9d0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.721838] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2030.722093] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2030.722264] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleting the datastore file [datastore1] 15b6eda1-db87-45d1-a0c6-320386b02e12 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2030.722536] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45787bd2-c2b5-46e9-9779-54fcf73a6acf {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.728141] env[62820]: DEBUG oslo_vmware.api [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for the task: (returnval){ [ 2030.728141] env[62820]: value = "task-1696749" [ 2030.728141] 
env[62820]: _type = "Task" [ 2030.728141] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.735375] env[62820]: DEBUG oslo_vmware.api [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.238652] env[62820]: DEBUG oslo_vmware.api [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Task: {'id': task-1696749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11944} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.238983] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2031.239155] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2031.239331] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2031.239507] env[62820]: INFO nova.compute.manager [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Took 1.23 seconds to destroy the instance on the hypervisor. [ 2031.239752] env[62820]: DEBUG oslo.service.loopingcall [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.239945] env[62820]: DEBUG nova.compute.manager [-] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2031.240056] env[62820]: DEBUG nova.network.neutron [-] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2031.523273] env[62820]: DEBUG nova.compute.manager [req-45bff91a-c93c-4bbf-b57f-f2273a22dcf1 req-5303207f-32f6-4943-9df9-d250de81d5dd service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Received event network-vif-deleted-4b6e11a8-0891-4efe-bc15-3803f5edc4c0 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2031.523508] env[62820]: INFO nova.compute.manager [req-45bff91a-c93c-4bbf-b57f-f2273a22dcf1 req-5303207f-32f6-4943-9df9-d250de81d5dd service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Neutron deleted interface 4b6e11a8-0891-4efe-bc15-3803f5edc4c0; detaching it from the instance and deleting it from the info cache [ 2031.523685] env[62820]: DEBUG nova.network.neutron [req-45bff91a-c93c-4bbf-b57f-f2273a22dcf1 req-5303207f-32f6-4943-9df9-d250de81d5dd service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.998606] env[62820]: DEBUG nova.network.neutron [-] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.025766] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5345f124-f9ed-4549-8f33-fabb4d5e5bc3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.035409] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb0ad6e-3b40-4c41-a3d3-63fa8e49add8 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.059620] env[62820]: DEBUG nova.compute.manager [req-45bff91a-c93c-4bbf-b57f-f2273a22dcf1 req-5303207f-32f6-4943-9df9-d250de81d5dd service nova] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Detach interface failed, port_id=4b6e11a8-0891-4efe-bc15-3803f5edc4c0, reason: Instance 15b6eda1-db87-45d1-a0c6-320386b02e12 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 2032.501555] env[62820]: INFO nova.compute.manager [-] [instance: 15b6eda1-db87-45d1-a0c6-320386b02e12] Took 1.26 seconds to deallocate network for instance. 
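Editor's note on the teardown sequence above: the entries trace the VMwareVCDriver destroy path for instance 15b6eda1-db87-45d1-a0c6-320386b02e12 — the guest is powered off (PowerOffVM_Task, task-1696747), unregistered from the vCenter inventory, its folder on datastore1 is deleted (DeleteDatastoreFile_Task, task-1696749), and the Neutron port is then deallocated (hence the network-vif-deleted event and the empty instance_info_cache). The repeated "Acquiring lock / acquired / released" lines come from oslo.concurrency's lockutils wrapper, which reports how long each caller waited for and held a named lock such as "compute_resources" or the instance UUID. The earlier final-resource-view line is consistent with simple accounting of the tracked allocations: with 512 MB reserved and two tracked instances of 192 MB, 1 GB and 1 vCPU each, used_ram = 512 + 2 x 192 = 896 MB, used_disk = 2 GB and used_vcpus = 2.

Each vCenter method that returns a Task object is polled to completion through oslo.vmware, which is why the log pairs a "Waiting for the task" block with a later "completed successfully" line carrying duration_secs. The following is a minimal sketch of that call pattern using the public oslo.vmware session API (invoke_api / wait_for_task); the session construction, the vm_ref/dc_ref lookups and all error handling are assumed here, so it illustrates the pattern rather than reproducing Nova's actual vmops code.

    # Illustrative sketch only -- not Nova's vmops implementation.
    # Assumes an already-created oslo_vmware.api.VMwareAPISession plus
    # managed-object references (vm_ref, dc_ref) obtained elsewhere.
    from oslo_vmware import api


    def destroy_vm(session: api.VMwareAPISession, vm_ref, dc_ref, ds_path: str) -> None:
        # PowerOffVM_Task (task-1696747 above) returns a Task moref which
        # wait_for_task() polls until vCenter reports success or an error.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)

        # UnregisterVM removes the VM from the vCenter inventory but leaves
        # its files on the datastore, hence the explicit delete that follows.
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)

        # DeleteDatastoreFile_Task (task-1696749 above), e.g. with
        # ds_path = "[datastore1] 15b6eda1-db87-45d1-a0c6-320386b02e12".
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)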
[ 2033.007857] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.008248] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.008355] env[62820]: DEBUG nova.objects.instance [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lazy-loading 'resources' on Instance uuid 15b6eda1-db87-45d1-a0c6-320386b02e12 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2033.551910] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57e8b4f-f6c9-4e6a-af71-1e74b885f223 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.559416] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e48f2e8-b872-4f3d-bfcd-54509d3b8ff3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.588590] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c921839-9c9c-4de9-bc62-9536e39dc846 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.596455] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c865c3-2300-45d2-bc85-cfc92154ef46 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.608968] env[62820]: DEBUG nova.compute.provider_tree [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2034.141812] env[62820]: DEBUG nova.scheduler.client.report [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Updated inventory for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with generation 191 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2034.142123] env[62820]: DEBUG nova.compute.provider_tree [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Updating resource provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a generation from 191 to 192 during operation: update_inventory {{(pid=62820) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2034.142375] env[62820]: DEBUG nova.compute.provider_tree [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Updating inventory in ProviderTree for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2034.646983] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.666577] env[62820]: INFO nova.scheduler.client.report [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Deleted allocations for instance 15b6eda1-db87-45d1-a0c6-320386b02e12 [ 2035.175130] env[62820]: DEBUG oslo_concurrency.lockutils [None req-c3cc8c2a-88c3-482b-9ce4-b379882e6578 tempest-ServerActionsTestOtherA-1421930396 tempest-ServerActionsTestOtherA-1421930396-project-member] Lock "15b6eda1-db87-45d1-a0c6-320386b02e12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.673s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.678896] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.679200] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.182380] env[62820]: 
DEBUG nova.compute.utils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2054.685399] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.744143] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.744496] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.744669] env[62820]: INFO nova.compute.manager [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Attaching volume d047d84d-d7d3-4a9e-a84f-62c73e2fab30 to /dev/sdb [ 2055.774352] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f6040d-b57e-47fa-8448-20fb1d692344 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.782037] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b244d48-5335-4240-8803-8d50db218871 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.794687] env[62820]: DEBUG nova.virt.block_device [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating existing volume attachment record: aed9846d-5442-4c9c-9cda-9492b351d406 {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2060.338483] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2060.338753] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353716', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'name': 'volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'serial': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2060.339619] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48cdb72-aef8-41ca-9929-6218fd0c01a3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.357109] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e52ac73-d272-4887-9645-ccfe06c1a9f1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.380170] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30/volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2060.380403] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faf78b14-917a-4eca-b444-ff94b6c1d1b6 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.398110] env[62820]: DEBUG oslo_vmware.api [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2060.398110] env[62820]: value = "task-1696752" [ 2060.398110] env[62820]: _type = "Task" [ 2060.398110] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.406050] env[62820]: DEBUG oslo_vmware.api [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696752, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.907838] env[62820]: DEBUG oslo_vmware.api [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696752, 'name': ReconfigVM_Task, 'duration_secs': 0.329623} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.908137] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30/volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2060.912697] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52c9f904-a806-4f36-94bc-3d0e9f0f2e5e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.927361] env[62820]: DEBUG oslo_vmware.api [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2060.927361] env[62820]: value = "task-1696753" [ 2060.927361] env[62820]: _type = "Task" [ 2060.927361] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.935566] env[62820]: DEBUG oslo_vmware.api [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696753, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.437317] env[62820]: DEBUG oslo_vmware.api [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696753, 'name': ReconfigVM_Task, 'duration_secs': 0.175035} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.437618] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353716', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'name': 'volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'serial': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2062.474338] env[62820]: DEBUG nova.objects.instance [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2062.981553] env[62820]: DEBUG oslo_concurrency.lockutils [None req-ce92046a-c578-4778-8480-025390b63ac2 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.237s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.779205] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.779483] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.282686] env[62820]: DEBUG nova.compute.utils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Using /dev/sd instead of None {{(pid=62820) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2064.786194] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.840432] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 
tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.840806] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.841013] env[62820]: INFO nova.compute.manager [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Attaching volume 5b2dd21b-c363-49f7-a407-a18c851e91f8 to /dev/sdc [ 2065.869784] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c9578c-3cb7-4436-81f7-1419db680f54 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.876865] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dda1b6c-18b4-4afc-a7b6-edabfedf8be5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.889786] env[62820]: DEBUG nova.virt.block_device [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating existing volume attachment record: 15f913b2-81d6-4c4b-b42a-f9b72a71c1fe {{(pid=62820) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2070.432170] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Volume attach. 
Driver type: vmdk {{(pid=62820) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2070.432461] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353717', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'name': 'volume-5b2dd21b-c363-49f7-a407-a18c851e91f8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'serial': '5b2dd21b-c363-49f7-a407-a18c851e91f8'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2070.433393] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecf8818-d485-47a7-9838-cd5d2abbfdd3 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.449718] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0edee28-f3a9-43c3-8e30-4e10840c68a5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.475638] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-5b2dd21b-c363-49f7-a407-a18c851e91f8/volume-5b2dd21b-c363-49f7-a407-a18c851e91f8.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2070.475865] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee674a12-15d7-4367-9512-24c1640b0d35 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.493394] env[62820]: DEBUG oslo_vmware.api [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2070.493394] env[62820]: value = "task-1696756" [ 2070.493394] env[62820]: _type = "Task" [ 2070.493394] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.500553] env[62820]: DEBUG oslo_vmware.api [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696756, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.003638] env[62820]: DEBUG oslo_vmware.api [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696756, 'name': ReconfigVM_Task, 'duration_secs': 0.317524} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.003909] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-5b2dd21b-c363-49f7-a407-a18c851e91f8/volume-5b2dd21b-c363-49f7-a407-a18c851e91f8.vmdk or device None with type thin {{(pid=62820) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2071.008352] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce7a9afe-439b-4c11-8029-80c8f62b796f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.022867] env[62820]: DEBUG oslo_vmware.api [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2071.022867] env[62820]: value = "task-1696757" [ 2071.022867] env[62820]: _type = "Task" [ 2071.022867] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.030577] env[62820]: DEBUG oslo_vmware.api [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696757, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.533152] env[62820]: DEBUG oslo_vmware.api [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696757, 'name': ReconfigVM_Task, 'duration_secs': 0.135403} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.533489] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353717', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'name': 'volume-5b2dd21b-c363-49f7-a407-a18c851e91f8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'serial': '5b2dd21b-c363-49f7-a407-a18c851e91f8'} {{(pid=62820) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2072.566688] env[62820]: DEBUG nova.objects.instance [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2073.072954] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0501e818-4803-4ea6-8d79-1fe3a5e46412 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.352540] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.352724] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2073.855507] env[62820]: INFO nova.compute.manager [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Detaching volume d047d84d-d7d3-4a9e-a84f-62c73e2fab30 [ 2073.884211] env[62820]: INFO nova.virt.block_device [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Attempting to driver detach volume d047d84d-d7d3-4a9e-a84f-62c73e2fab30 from mountpoint /dev/sdb [ 2073.884445] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Volume detach. 
Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2073.884626] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353716', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'name': 'volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'serial': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2073.885504] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f30dbbc-44ab-4c5a-9739-2880d55ad750 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.910660] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d55191-355b-4710-8f68-7e23d0686f20 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.917767] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1167de54-c12e-49f7-bf59-5d9887de954e {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.939321] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7460b85-9220-47ae-8300-86f8e3fc4184 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.954758] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] The volume has not been displaced from its original location: [datastore1] volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30/volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2073.959785] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2073.960073] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46546b36-23f3-41cc-83b4-40740b45a97f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.980230] env[62820]: DEBUG oslo_vmware.api [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2073.980230] env[62820]: value = "task-1696758" [ 2073.980230] env[62820]: _type = "Task" [ 2073.980230] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.987754] env[62820]: DEBUG oslo_vmware.api [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.490205] env[62820]: DEBUG oslo_vmware.api [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696758, 'name': ReconfigVM_Task, 'duration_secs': 0.259245} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.490495] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2074.495011] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f2dd2be-2723-4354-881e-e72c12d67e09 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.510619] env[62820]: DEBUG oslo_vmware.api [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2074.510619] env[62820]: value = "task-1696759" [ 2074.510619] env[62820]: _type = "Task" [ 2074.510619] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.518921] env[62820]: DEBUG oslo_vmware.api [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696759, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.828657] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2074.828873] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.020703] env[62820]: DEBUG oslo_vmware.api [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696759, 'name': ReconfigVM_Task, 'duration_secs': 0.137751} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.021132] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353716', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'name': 'volume-d047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30', 'serial': 'd047d84d-d7d3-4a9e-a84f-62c73e2fab30'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2075.333371] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.333578] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Starting heal instance info cache {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10286}} [ 2075.562424] env[62820]: DEBUG nova.objects.instance [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2075.877877] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2075.878052] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquired lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.878244] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 
8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Forcefully refreshing network info cache for instance {{(pid=62820) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2076.569423] env[62820]: DEBUG oslo_concurrency.lockutils [None req-bf1e61c8-61a2-4b78-a93e-6333ae2572e8 tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.216s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.601504] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.601747] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.094664] env[62820]: DEBUG nova.network.neutron [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating instance_info_cache with network_info: [{"id": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "address": "fa:16:3e:5d:3e:9e", "network": {"id": "23f01f99-59b1-4543-b8e4-8c7e7807ac7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1073645764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f214ed24ef014d32bfaea02a7174b912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap256b7e4b-55", "ovs_interfaceid": "256b7e4b-55e2-4fef-9850-258ee7dc7bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.104021] env[62820]: INFO nova.compute.manager [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Detaching volume 5b2dd21b-c363-49f7-a407-a18c851e91f8 [ 2077.136082] env[62820]: INFO nova.virt.block_device [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 
tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Attempting to driver detach volume 5b2dd21b-c363-49f7-a407-a18c851e91f8 from mountpoint /dev/sdc [ 2077.136333] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Volume detach. Driver type: vmdk {{(pid=62820) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2077.136504] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353717', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'name': 'volume-5b2dd21b-c363-49f7-a407-a18c851e91f8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'serial': '5b2dd21b-c363-49f7-a407-a18c851e91f8'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2077.137374] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec082b2-25a2-46b7-9255-9f9b0aa38170 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.159099] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47344349-f389-4b40-8dd2-a810ab633448 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.166093] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aaab5f-fccf-42c0-adf5-6175870209d5 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.186077] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13892d1-3b61-4276-a8b4-e4d97eecbd06 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.200823] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] The volume has not been displaced from its original location: [datastore1] volume-5b2dd21b-c363-49f7-a407-a18c851e91f8/volume-5b2dd21b-c363-49f7-a407-a18c851e91f8.vmdk. No consolidation needed. 
{{(pid=62820) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2077.205917] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfiguring VM instance instance-0000007c to detach disk 2002 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2077.206194] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d9d3b4e-9be0-4bfd-a139-616655e1170d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.223725] env[62820]: DEBUG oslo_vmware.api [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2077.223725] env[62820]: value = "task-1696760" [ 2077.223725] env[62820]: _type = "Task" [ 2077.223725] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.230895] env[62820]: DEBUG oslo_vmware.api [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696760, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.597222] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Releasing lock "refresh_cache-8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" {{(pid=62820) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.597666] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updated the network info_cache for instance {{(pid=62820) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10357}} [ 2077.597666] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.597766] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.597884] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.598046] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.598196] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._poll_volume_usage 
{{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.598339] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.598471] env[62820]: DEBUG nova.compute.manager [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62820) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10905}} [ 2077.598613] env[62820]: DEBUG oslo_service.periodic_task [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62820) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.733689] env[62820]: DEBUG oslo_vmware.api [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696760, 'name': ReconfigVM_Task, 'duration_secs': 0.21793} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.733947] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Reconfigured VM instance instance-0000007c to detach disk 2002 {{(pid=62820) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2077.738620] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1354daae-8e15-4907-b788-076b4e26dc2f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.753649] env[62820]: DEBUG oslo_vmware.api [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2077.753649] env[62820]: value = "task-1696761" [ 2077.753649] env[62820]: _type = "Task" [ 2077.753649] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.761047] env[62820]: DEBUG oslo_vmware.api [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696761, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.101791] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.102090] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.102267] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.102467] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62820) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2078.103342] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cd64a5-6fc5-4230-a165-1bad8a28a2b2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.111314] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4068f3fd-6702-4631-a976-b24913ef9aa9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.126516] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f5085a-801d-492b-8f9d-545152e968b9 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.132735] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b487b6bf-5e10-4df8-8302-65cfc4e0b960 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.161834] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181176MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=62820) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2078.162184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.162184] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62820) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.263762] env[62820]: DEBUG oslo_vmware.api [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696761, 'name': ReconfigVM_Task, 'duration_secs': 0.134717} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.264058] env[62820]: DEBUG nova.virt.vmwareapi.volumeops [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353717', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'name': 'volume-5b2dd21b-c363-49f7-a407-a18c851e91f8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418', 'attached_at': '', 'detached_at': '', 'volume_id': '5b2dd21b-c363-49f7-a407-a18c851e91f8', 'serial': '5b2dd21b-c363-49f7-a407-a18c851e91f8'} {{(pid=62820) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2078.806957] env[62820]: DEBUG nova.objects.instance [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'flavor' on Instance uuid 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2079.187673] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Instance 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62820) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2079.187888] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2079.188915] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62820) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2079.214398] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05c4240-7a32-4617-85e9-274f404d5ad2 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.221936] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fd2ad5-9a16-4e3d-b90e-a2edbd4dee36 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.252392] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b24b865-16cb-45a8-acb4-5c98cb6be914 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.259765] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ef6546-35f7-4a23-9325-a66151d18257 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.272790] env[62820]: DEBUG nova.compute.provider_tree [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2079.775888] env[62820]: DEBUG nova.scheduler.client.report [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2079.813452] env[62820]: DEBUG oslo_concurrency.lockutils [None req-7109be95-bf7d-4f3b-b64f-77ac34966e6a tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.281046] env[62820]: DEBUG nova.compute.resource_tracker [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62820) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2080.281331] env[62820]: DEBUG oslo_concurrency.lockutils [None req-fd158eaf-304f-447d-b839-cb62afc88e08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.119s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.969063] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.969063] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.969063] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.969529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.969529] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.971829] env[62820]: INFO nova.compute.manager [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Terminating instance [ 2081.475595] env[62820]: DEBUG nova.compute.manager [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Start destroying the instance on the hypervisor. 
{{(pid=62820) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2081.475862] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Destroying instance {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2081.476756] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498b6596-7f35-4f54-85b7-5ccca9f99e61 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.484681] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Powering off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2081.484910] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07957d5d-68cf-4dd5-aace-f5db8bd34f60 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.492093] env[62820]: DEBUG oslo_vmware.api [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2081.492093] env[62820]: value = "task-1696762" [ 2081.492093] env[62820]: _type = "Task" [ 2081.492093] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.499478] env[62820]: DEBUG oslo_vmware.api [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.002456] env[62820]: DEBUG oslo_vmware.api [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696762, 'name': PowerOffVM_Task, 'duration_secs': 0.17457} completed successfully. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.002810] env[62820]: DEBUG nova.virt.vmwareapi.vm_util [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Powered off the VM {{(pid=62820) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2082.003092] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Unregistering the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2082.003225] env[62820]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc8dc597-db11-4188-89ac-fc239c122756 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.079156] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Unregistered the VM {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2082.079395] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Deleting contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2082.079636] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Deleting the datastore file [datastore1] 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2082.079924] env[62820]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4acbd431-cf6a-4843-9aa7-fcb33f528b91 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.086643] env[62820]: DEBUG oslo_vmware.api [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for the task: (returnval){ [ 2082.086643] env[62820]: value = "task-1696764" [ 2082.086643] env[62820]: _type = "Task" [ 2082.086643] env[62820]: } to complete. {{(pid=62820) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.093911] env[62820]: DEBUG oslo_vmware.api [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696764, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.597562] env[62820]: DEBUG oslo_vmware.api [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Task: {'id': task-1696764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146733} completed successfully. {{(pid=62820) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.597834] env[62820]: DEBUG nova.virt.vmwareapi.ds_util [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Deleted the datastore file {{(pid=62820) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2082.597992] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Deleted contents of the VM from datastore datastore1 {{(pid=62820) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2082.598199] env[62820]: DEBUG nova.virt.vmwareapi.vmops [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Instance destroyed {{(pid=62820) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2082.598372] env[62820]: INFO nova.compute.manager [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2082.598606] env[62820]: DEBUG oslo.service.loopingcall [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62820) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.598793] env[62820]: DEBUG nova.compute.manager [-] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Deallocating network for instance {{(pid=62820) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2082.598887] env[62820]: DEBUG nova.network.neutron [-] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] deallocate_for_instance() {{(pid=62820) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2083.021732] env[62820]: DEBUG nova.compute.manager [req-0d1d4df5-1ae3-4b3d-a1b9-b75115e37329 req-950677e7-a2ed-4b1f-9c31-442328ab1d0c service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Received event network-vif-deleted-256b7e4b-55e2-4fef-9850-258ee7dc7bc1 {{(pid=62820) external_instance_event /opt/stack/nova/nova/compute/manager.py:11489}} [ 2083.021732] env[62820]: INFO nova.compute.manager [req-0d1d4df5-1ae3-4b3d-a1b9-b75115e37329 req-950677e7-a2ed-4b1f-9c31-442328ab1d0c service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Neutron deleted interface 256b7e4b-55e2-4fef-9850-258ee7dc7bc1; detaching it from the instance and deleting it from the info cache [ 2083.022059] env[62820]: DEBUG nova.network.neutron [req-0d1d4df5-1ae3-4b3d-a1b9-b75115e37329 req-950677e7-a2ed-4b1f-9c31-442328ab1d0c service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.496646] env[62820]: DEBUG nova.network.neutron [-] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Updating instance_info_cache with network_info: [] {{(pid=62820) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.524448] env[62820]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3785cd59-f37b-4c0c-86f7-2c2fb95ca68a {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.535388] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf826f9-cb44-4349-9ed5-cd328fe135f0 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.559298] env[62820]: DEBUG nova.compute.manager [req-0d1d4df5-1ae3-4b3d-a1b9-b75115e37329 req-950677e7-a2ed-4b1f-9c31-442328ab1d0c service nova] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Detach interface failed, port_id=256b7e4b-55e2-4fef-9850-258ee7dc7bc1, reason: Instance 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 could not be found. {{(pid=62820) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11323}} [ 2083.999949] env[62820]: INFO nova.compute.manager [-] [instance: 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418] Took 1.40 seconds to deallocate network for instance. 
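The entries above trace the vmdk detach and instance destroy path: each vCenter operation (ReconfigVM_Task to drop the disk, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is submitted through the oslo.vmware session and then polled until it finishes, which is what the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines correspond to. The following is a minimal Python sketch of that submit-and-poll pattern, not Nova's actual driver code; it assumes an already-created oslo_vmware.api.VMwareAPISession named "session" plus placeholder "vm_ref" and "detach_spec" objects, none of which are taken verbatim from this log.

    # Sketch only: mirrors the ReconfigVM_Task / PowerOffVM_Task sequence seen above.
    # Assumes `session` is an oslo_vmware.api.VMwareAPISession; `vm_ref` (VM moref)
    # and `detach_spec` (a vim.vm.ConfigSpec removing the volume's disk) are
    # hypothetical objects built elsewhere.
    def detach_disk_and_power_off(session, vm_ref, detach_spec):
        # invoke_api() submits the vCenter call and returns a Task moref
        # (e.g. "task-1696760" in the log).
        reconfig_task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                           vm_ref, spec=detach_spec)
        # wait_for_task() polls the task (the "progress is N%" DEBUG lines)
        # until it reaches 'success', raising if it ends in 'error'.
        session.wait_for_task(reconfig_task)

        # Power the VM off before it is unregistered and its datastore files
        # are deleted (the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task
        # entries above).
        poweroff_task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(poweroff_task)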
[ 2084.506663] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.507081] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.507192] env[62820]: DEBUG nova.objects.instance [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lazy-loading 'resources' on Instance uuid 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 {{(pid=62820) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2085.040776] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fb92e9-8d8a-4cc4-952e-988fdb176fef {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.048661] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad66bdc-e418-4c55-8bee-23282284c68d {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.078583] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebd1814-8647-4cc1-8934-79f93afe0f7f {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.085953] env[62820]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36561bd-51a3-4131-822a-34224f31d9d1 {{(pid=62820) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.098765] env[62820]: DEBUG nova.compute.provider_tree [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Inventory has not changed in ProviderTree for provider: 8a0693d4-1456-4a04-ae15-b1eaea0edd7a {{(pid=62820) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.602358] env[62820]: DEBUG nova.scheduler.client.report [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Inventory has not changed for provider 8a0693d4-1456-4a04-ae15-b1eaea0edd7a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62820) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2086.107045] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 
tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.126810] env[62820]: INFO nova.scheduler.client.report [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Deleted allocations for instance 8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418 [ 2086.634743] env[62820]: DEBUG oslo_concurrency.lockutils [None req-0345c792-c4a0-408d-9602-7d8bd7f4c03c tempest-AttachVolumeTestJSON-1246148512 tempest-AttachVolumeTestJSON-1246148512-project-member] Lock "8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.666s {{(pid=62820) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
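Throughout this section, the "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" triplets come from oslo_concurrency.lockutils serializing per-instance work such as do_detach_volume and do_terminate_instance on the instance UUID. Below is a minimal sketch of that locking pattern, assuming hypothetical function bodies; only the lock usage itself reflects what the log shows.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '8a3e1a76-d2eb-4ca0-a4d1-0b5dd1bba418'

    # Decorator form: the function runs only while the per-instance lock is held;
    # lockutils emits the "waited"/"held" timings around the call.
    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        pass  # termination work elided; placeholder only

    # Equivalent context-manager form, as the detach path might use it.
    def do_detach_volume():
        with lockutils.lock(INSTANCE_UUID):
            pass  # driver-detach work elided; placeholder only

    if __name__ == '__main__':
        do_detach_volume()
        do_terminate_instance()

By default these locks are in-process semaphores rather than external file locks, which is consistent with the near-zero "waited 0.000s" figures logged when no other request holds the instance's lock.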